In [1]:
import os
import h5py
import numpy as np
import os
import numpy as np
import matplotlib.pyplot as plt
import pickle
import sys
from collections import namedtuple
from scipy.ndimage import gaussian_filter
from skimage.registration import phase_cross_correlation
from scipy.ndimage import fourier_shift
import math
In [2]:
import numpy as np
import random
import matplotlib.pyplot as plt
import tensorflow as tf
from sklearn.metrics import mean_squared_error
from tensorflow.keras import Model, layers
from tensorflow.keras.models import Sequential
from tensorflow.keras.initializers import GlorotUniform
import tensorflow as tf

ki = tf.keras.initializers.RandomNormal()

class CL_ConvNeuralNet(Model):
   
    def __init__(self, input_dims = [14,2], output_dims=[12]):
        
        self.initializer =  tf.keras.initializers.he_uniform()
        self.input_dims = input_dims
        self.output_dims = output_dims
        super(CL_ConvNeuralNet, self).__init__()
        self.model = self.build_model()
        self.optimizer = tf.keras.optimizers.RMSprop()

    def build_model(self):
        InputImage = layers.Input(shape=(self.input_dims[0],1))
        InputNumeric = layers.Input(shape=(self.input_dims[1]))
        cnet = layers.Dense(512, activation=tf.nn.relu,
                               kernel_initializer=self.initializer )(InputImage)

        cnet = layers.Dense(512, activation=tf.nn.relu,
                               kernel_initializer=self.initializer )(cnet)
        
        cnet = layers.Dense(256, activation=tf.nn.relu,
                               kernel_initializer=self.initializer )(cnet)
        
        
        cnet = layers.Flatten()(cnet)
        
        cnet = Model(inputs=InputImage, outputs=cnet)

        numeric = layers.Dense(256, activation=tf.nn.relu,
                               kernel_initializer=self.initializer )(InputNumeric)

        numeric = layers.Dense(256, activation=tf.nn.relu,
                               kernel_initializer=self.initializer )(numeric)
        
        numeric = layers.Dense(256, activation=tf.nn.relu,
                               kernel_initializer=self.initializer )(numeric)

        numeric = Model(inputs=InputNumeric, outputs=numeric)

        combined = layers.concatenate([cnet.output, numeric.output])
        
        x = layers.Dense(512,activation=tf.nn.relu, kernel_initializer=self.initializer)(combined)
        x = layers.Dense(256,activation=tf.nn.relu, kernel_initializer=self.initializer)(x)
        combined_network = layers.Dense(self.output_dims[0],activation='linear', 
                                        kernel_initializer=self.initializer)(x)
    
        model = Model(inputs=[cnet.input, numeric.input], outputs=combined_network)

        return model
    
    # define forward pass
    def call(self, inputs):
        prediction = self.model(inputs)[:,:]

        return prediction
2023-05-23 11:01:47.281706: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations:  AVX2 AVX512F AVX512_VNNI FMA
To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.
In [3]:
import os
import numpy as np
import matplotlib.pyplot as plt
import pickle
import sys
from collections import namedtuple
from scipy.ndimage import gaussian_filter
from skimage.registration import phase_cross_correlation
from scipy.ndimage import fourier_shift
%matplotlib inline
start_pix = 55
end_pix = 70

from scipy import interpolate

def smooth_window(data, window_size):
    return np.convolve(data, np.ones((window_size,))/window_size, mode='valid')

def return_norm_wall_loc(am_img, start_pix = start_pix, end_pix = end_pix,
                        window_size=1):
    
    wps = np.zeros(shape=am_img.shape[0])
    for ind in range(am_img.shape[0]):
        wall_profile = smooth_window(am_img[ind,start_pix:end_pix], window_size)
        f = interpolate.interp1d(np.arange(len(wall_profile)), wall_profile, kind = 'nearest')
        xnew = np.linspace(0, len(wall_profile)-1, 128*10, endpoint = True)
        norm_wall_profile = np.argmax(np.diff(f(xnew)))
        subpix_max = xnew[norm_wall_profile]
        wps[ind] = subpix_max
    return wps


def normalize_images(input_images):
    return (input_images - np.min(input_images)) / (np.max(input_images) - np.min(input_images))

Dataset 1¶

In [4]:
folder = r'/Users/bry/Dropbox'
file_name = 'wall_pulsing_revised_smaller_PTO_40deg.p'
path_to_file = os.path.join(folder, file_name)
data = pickle.load(open(path_to_file, 'rb'))
In [5]:
data_collected = data['results']
wall_bias_locs = data['wall_locs']

pix = 128
reset_freq = 10
max_bias = 10
max_pw = 500
window_size=3

local_win_size = 7
min_ind = 0.046875 
max_ind = 0.9 
In [6]:
phase_images = []
amp_images = []

for ind in range(len(data_collected)):
    output=np.asarray(data_collected[ind])
    amp_img = output[2].reshape(-1, pix*2)
    phase_img = output[3].reshape(-1, pix*2)

    amp_images.append(amp_img[:,:pix])
    phase_images.append(phase_img[:,:pix]) 
In [7]:
l=0

actions = []
actions_norm = []

index_tracker =[]

for ind in range(len(data_collected)):

    
    if ind%reset_freq!=0:
        xpos,ypos = wall_bias_locs[l][3], wall_bias_locs[l][4] 
        bias_amp, bias_pw = wall_bias_locs[l][1], wall_bias_locs[l][2]


        xpos_norm = xpos/pix
        ypos_norm = ypos/pix
        bias_amp_norm = bias_amp/max_bias
        bias_pw_norm = bias_pw/max_pw
        index_tracker.append((ind,l))
        l+=1
    else:
        xpos = np.nan
        ypos = np.nan
        xpos_norm = np.nan
        ypos_norm = np.nan
        bias_amp = np.nan
        bias_pw = np.nan
        bias_amp_norm = np.nan
        bias_pw_norm = np.nan
        zero = np.nan
        one = np.nan
        two = np.nan
        three = np.nan
    
    actions.append([xpos,ypos, bias_amp, bias_pw])
    actions_norm.append([xpos_norm, ypos_norm, bias_amp_norm, bias_pw_norm])

Dataset 2¶

In [8]:
path = r'/Users/bry/Dropbox/New effort November 2022/wall pulsing/BE Version/'

all_img_bias=[]
for k in range(300,604):
    try:
        file_name_0 = path + 'Transition_k=' + str(k) + '.h5'
        h5_f0 = h5py.File(file_name_0, 'r+')
        bias_details0 = h5_f0['Measurement_000'].attrs['bias_details']
        h5_f0.close()
        bias_details0 = np.insert(bias_details0, 0, k)
        all_img_bias.append(bias_details0)
        k+=1
    except:
        print("Something went wrong:", k)
        k+=1
Something went wrong: 300
Something went wrong: 301
In [9]:
m=0
amp = []
phase = []
for k in range(300,604): #start,stop,step
    try:
        file_name_0 = path + 'Transition_k=' + str(k) + '.h5'
        h5_f0 = h5py.File(file_name_0, 'r+')
        qf0 = h5_f0['Measurement_000']['sho_fit']['sho_fit']

        imamp = qf0[:,:,0]
        imphase = qf0[:,:,3]
        amp.append(imamp)
        phase.append(imphase)
        h5_f0.close()
        m+=1
    except:
        print("Something went wrong:", k)
Something went wrong: 300
Something went wrong: 301
Something went wrong: 303
Something went wrong: 304
In [10]:
#We only want the images that have associated actions, so ignore the first few
all_img_bias = all_img_bias[2:]

amp_images_new = amp 
phase_images_new = phase 
In [11]:
params={'xtick.labelsize':'x-large'}
plt.rcParams.update(params)
plt.rcParams['figure.dpi']=300

Dataset 1¶

In [12]:
phase_images_segmented = np.copy(amp_images)
phase_images_segmented = normalize_images(phase_images_segmented)
phase_images_segmented[phase_images_segmented<0.2] = 0
phase_images_segmented[phase_images_segmented>=0.4] = 1
In [13]:
transitions = []
transitions_norm = []
transitions_profiles = []
for ind in range(1, len(phase_images)):
    tnew = namedtuple('Transition', ['state','action', 'next_state'])
    tnew_norm = namedtuple('Transition', ['state','action', 'next_state'])
    tnew_prof = namedtuple('Transition', ['state','action', 'next_state'])
    
    tnew.state = phase_images_segmented[ind-1]
    tnew.next_state = phase_images_segmented[ind]
    tnew.action = actions[ind]
    
    tnew_norm.state = phase_images_segmented[ind-1]
    tnew_norm.next_state = phase_images_segmented[ind]
    tnew_norm.action = actions_norm[ind]
    
    state = tnew.state
    next_state = tnew.next_state
    shift, error, diffphase = phase_cross_correlation(state, next_state,
                                                  upsample_factor=3)
    offset_image = fourier_shift(np.fft.fftn(next_state), shift)
    offset_image = np.fft.ifftn(offset_image)
    next_state = offset_image.real
    wps = return_norm_wall_loc(state, window_size=window_size) + start_pix
    wps_next = return_norm_wall_loc(next_state,window_size=window_size) + start_pix
    
    tnew_prof.state = wps
    tnew_prof.action = tnew_norm.action
    tnew_prof.next_state = wps_next
    
    if not np.isnan(tnew.action[0]) and not np.isnan(actions[ind+1][0]):
        transitions.append(tnew)
        transitions_norm.append(tnew_norm)
        transitions_profiles.append(tnew_prof)

Dataset 2¶

In [14]:
l=0
bpw = []
bamp = []
for ind in range(1,301): 
    
    bias_amp, bias_pw = all_img_bias[l][1], all_img_bias[l][2]
    l+=1
    bpw.append(bias_pw)
    bamp.append(bias_amp)
In [15]:
l=0
actions_new = []
actions_norm_new = []
index_tracker_new =[]

for ind in range(1,301): 
    
     
    xpos,ypos = all_img_bias[l][-2], all_img_bias[l][-1] 
    bias_amp, bias_pw = all_img_bias[l][1], all_img_bias[l][2]
    bias_pw=bias_pw*1000

    xpos_norm = xpos/pix
    ypos_norm = ypos/pix
    bias_amp_norm = bias_amp/max_bias
    bias_pw_norm = bias_pw/max_pw
    index_tracker.append((ind,l))
    l+=1

    
    actions_new.append([xpos,ypos, bias_amp, bias_pw])
    actions_norm_new.append([xpos_norm, ypos_norm, bias_amp_norm, bias_pw_norm])
In [16]:
phase_images_segmented = np.copy(amp_images_new)
phase_images_segmented = normalize_images(phase_images_segmented)
phase_images_segmented[phase_images_segmented<0.2] = 0
phase_images_segmented[phase_images_segmented>=0.4] = 1
In [17]:
for ind in range(1, len(phase_images_new)):
    tnew = namedtuple('Transition', ['state','action', 'next_state'])
    tnew_norm = namedtuple('Transition', ['state','action', 'next_state'])
    tnew_prof = namedtuple('Transition', ['state','action', 'next_state'])
    
    tnew.state = phase_images_segmented[ind-1]
    tnew.next_state = phase_images_segmented[ind]
    tnew.action = actions_new[ind]
    
    tnew_norm.state = phase_images_segmented[ind-1]
    tnew_norm.next_state = phase_images_segmented[ind]
    tnew_norm.action = actions_norm_new[ind]
    
    state = tnew.state
    next_state = tnew.next_state
    shift, error, diffphase = phase_cross_correlation(state, next_state,
                                                  upsample_factor=3)
    offset_image = fourier_shift(np.fft.fftn(next_state), shift)
    offset_image = np.fft.ifftn(offset_image)
    next_state = offset_image.real
    wps = return_norm_wall_loc(state, window_size=window_size) + start_pix
    wps_next = return_norm_wall_loc(next_state,window_size=window_size) + start_pix
    
    tnew_prof.state = wps
    tnew_prof.action = tnew_norm.action
    tnew_prof.next_state = wps_next
    
    #if not np.isnan(tnew.action[0]) and not np.isnan(actions[ind+1][0]):
    transitions.append(tnew)
    transitions_norm.append(tnew_norm)
    transitions_profiles.append(tnew_prof)
In [18]:
local_win_size = 7
offset=5
local_state_size = 14
train_fraction = 0.80
num_training_points = len(transitions_norm)

train_split_indices = np.random.choice(np.arange(len(transitions_norm)),
                                       (int(num_training_points*train_fraction)),
                                       replace = False)

test_split_indices = [val for val in np.arange(len(transitions_norm)) if val not in train_split_indices]

#Once we have the indices we need to make the training data. X_train, y_train, X_test, y_test
#X_train is the action, state, y_train is the state+1
#Same goes for X_test and y_test
X_train, y_train, X_test, y_test = [], [], [], []

for train_ind in train_split_indices:
    transition = transitions_norm[train_ind]
    trans_profile = transitions_profiles[train_ind]
    if transition.action[1] > min_ind and transition.action[1] < max_ind:
        wall_pos = transition.action[1]*128 + offset
        wall = np.array(trans_profile.state)
        local_wall = wall[int(wall_pos - local_win_size): int(wall_pos + local_win_size)]
        local_wall = local_wall - np.mean(local_wall)
        lwall = np.zeros(local_state_size)
        lwall[:len(local_wall)] = local_wall
        lwall[len(local_wall):] = local_wall[-1]
        X_train.append([lwall, transition.action[2:]])
        difference_profile = trans_profile.next_state - trans_profile.state
        difference_profile[difference_profile>10]=10.0
        difference_profile[difference_profile<-10]=-10.0
        difference_profile = difference_profile/10
        difference_profile = difference_profile[int(wall_pos - local_win_size): int(wall_pos + local_win_size)]
        dprof = np.zeros(local_state_size)
        dprof[:len(difference_profile)] = difference_profile
        dprof[len(difference_profile):] = difference_profile[-1]
        y_train.append(smooth_window(dprof,window_size=3))

for test_ind in test_split_indices:
    transition = transitions_norm[test_ind]
    trans_profile = transitions_profiles[test_ind]
    if transition.action[1] > min_ind and transition.action[1] < max_ind:
        wall_pos = transition.action[1]*128 + offset
        wall = np.array(trans_profile.state)
        local_wall = wall[int(wall_pos - local_win_size): int(wall_pos + local_win_size)]
        local_wall = local_wall - np.mean(local_wall)
        lwall = np.zeros(local_state_size)
        lwall[:len(local_wall)] = local_wall
        lwall[len(local_wall):] = local_wall[-1]
        X_test.append([lwall, transition.action[2:]])
        difference_profile = trans_profile.next_state - trans_profile.state
        difference_profile[difference_profile>10]=10.0
        difference_profile[difference_profile<-10]=-10.0
        difference_profile = difference_profile/10
        difference_profile = difference_profile[int(wall_pos - local_win_size): int(wall_pos + local_win_size)]
        dprof = np.zeros(local_state_size)
        dprof[:len(difference_profile)] = difference_profile
        dprof[len(difference_profile):] = difference_profile[-1]
        y_test.append(smooth_window(dprof,window_size=3))
    
def myGenerator(batch_size = 16, num_batches = 32, image_noise = 0.001,action_noise = 0.001):
    batch_num = 0
    while batch_num < num_batches:
        
        train_data_slice = np.random.choice(np.arange(len(X_train)),size = batch_size, replace = False)
        validation_data_slice = np.random.choice(np.arange(len(X_test)),
                                                 size = min(8,batch_size), replace = False)
        
        xtrain = [X_train[int(val)] for val in train_data_slice]
        ytrain = [y_train[int(val)] for val in train_data_slice]
        
        xtest = [X_test[int(val)] for val in validation_data_slice]
        ytest = [y_test[int(val)] for val in validation_data_slice]
        
        #Convert to tensorflow arrays - training data
        xtrain_images = np.zeros(shape=(batch_size, xtrain[0][0].shape[0]))
        for ind in range(len(train_data_slice)): 
            xtrain_images[ind,:] = xtrain[ind][0] + \
            np.random.normal(loc=0.0, scale = image_noise, size=(len(xtrain[ind][0])))                  
        xtrain_images = tf.stack(xtrain_images)
        
        xtrain_actions = np.zeros(shape=(batch_size, len(xtrain[0][1])))
        for ind in range(len(train_data_slice)): 
            xtrain_actions[ind,:] = xtrain[ind][1]
            
        xtrain_actions = tf.stack(xtrain_actions)
        xtrain = [xtrain_images[:,:,None], xtrain_actions]
        
        #Convert to tensorflow arrays - testing data
        xtest_images = np.zeros(shape=((len(xtest)), xtest[0][0].shape[0]))
        for ind in range(len(validation_data_slice)): 
            xtest_images[ind,:] = xtest[ind][0] + \
            np.random.normal(loc=0.0, scale = image_noise, size=(len(xtest[ind][0])))                    
        xtest_images = tf.stack(xtest_images)
        
        xtest_actions = np.zeros(shape=(len(xtest), len(xtest[0][1])))
        for ind in range(len(validation_data_slice)): 
            xtest_actions[ind,:] = xtest[ind][1] + np.random.normal(loc=0.0, scale = action_noise,size=(2))
            
        xtest_actions = tf.stack(xtest_actions)
        
        xtest = [xtest_images[:,:,None], xtest_actions]
        
        yield xtrain, tf.stack(ytrain), xtest, tf.stack(ytest)
        batch_num+=1
In [19]:
ynet = CL_ConvNeuralNet()

stats = []
mygen = myGenerator(batch_size = 32, num_batches = 3000)
i=0
train_separate_branch = False 
penalty=2.0
#Let's write the training
ok_spectra=0
bad_spectra=0
num_batches = 32

mse = tf.keras.losses.MeanSquaredError()

for mxtrain, mytrain, mxtest, mytest in mygen:
    with tf.GradientTape() as tape:
        
        prediction = ynet(mxtrain)
        actions_original = mxtrain[1][:,:]

        batch_loss=0
        for t in range(num_batches):
            local_loss=0
            Voltages=actions_original[0][:1]
            input_wall = mxtrain[0][t]

            PW=actions_original[t][1:]
            mynewtrain=mytrain[t]

            output_spectra = prediction[t,:]
            
            #new regularization for monotonicity
            mono_loss = 0
            test_images, test_actions = [], []
            pwidths = [0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]
            
            for p in pwidths:
                 test_action_val_new_valp=p   
                 test_action_val_new_valv=0.5
                 test_actions.append([test_action_val_new_valv, test_action_val_new_valp])
                 test_images.append(input_wall)
            test_input = [tf.stack(test_images), tf.stack(test_actions)] 
            output = ynet(test_input)
            npoutput = output.numpy()
            arr = np.trapz((npoutput.T-npoutput.T.min()))     
            for idx in range(1, len(arr)):
                if arr[idx - 1] < arr[idx]:
                    do_nothing=1
                else:
                    mono_loss+=1

            #local physics based loss penalization/regulatization
            for m in range(12):
                output_elementnp = output_spectra[m].numpy()
                mynewtrainelement = mynewtrain[m].numpy()
                output_element = output_spectra[m]
                Ratio = output_elementnp/Voltages
                if Ratio < 0: #single negative value, somethings wrong physically and we should penalize
                    bad_spectra = (mynewtrainelement- output_element)**2 * penalty
                if Ratio > 0:
                    ok_spectra = (mynewtrainelement- output_element)**2
                total_loss = (bad_spectra + ok_spectra)/12
                local_loss+=total_loss
                local_loss+=mono_loss/(i+1)
            batch_loss+=local_loss
            
        print("loss batch it {} is {:.5f}  ".format(i, batch_loss))
            

        gradients = tape.gradient(batch_loss, ynet.trainable_variables)

    ynet.optimizer.apply_gradients(zip(gradients, ynet.trainable_variables))
    
    
    i+=1
    
    
#ynet.model.save_weights('trained_surrogate_weights_phys.h5')
2023-05-23 11:02:33.371933: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations:  AVX2 AVX512F AVX512_VNNI FMA
To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.
/Users/bry/opt/anaconda3/lib/python3.9/site-packages/keras/initializers/initializers_v2.py:120: UserWarning: The initializer HeUniform is unseeded and being called multiple times, which will return identical values  each time (even if the initializer is unseeded). Please update your code to provide a seed to the initializer, or avoid using the same initalizer instance more than once.
  warnings.warn(
loss batch it 0 is 2854.06201  
loss batch it 1 is 73105.25000  
loss batch it 2 is 1331.61279  
loss batch it 3 is 810.42670  
loss batch it 4 is 504.09863  
loss batch it 5 is 382.76746  
loss batch it 6 is 300.74353  
loss batch it 7 is 275.84201  
loss batch it 8 is 269.53278  
loss batch it 9 is 220.10960  
loss batch it 10 is 196.16895  
loss batch it 11 is 201.94739  
loss batch it 12 is 187.60081  
loss batch it 13 is 203.14417  
loss batch it 14 is 174.61566  
loss batch it 15 is 141.28506  
loss batch it 16 is 134.05098  
loss batch it 17 is 124.26223  
loss batch it 18 is 123.94591  
loss batch it 19 is 115.14157  
loss batch it 20 is 105.34750  
loss batch it 21 is 100.48485  
loss batch it 22 is 108.44222  
loss batch it 23 is 110.19262  
loss batch it 24 is 101.14913  
loss batch it 25 is 104.38560  
loss batch it 26 is 97.70222  
loss batch it 27 is 90.96044  
loss batch it 28 is 82.43442  
loss batch it 29 is 72.84116  
loss batch it 30 is 69.19463  
loss batch it 31 is 69.84611  
loss batch it 32 is 72.82991  
loss batch it 33 is 69.59209  
loss batch it 34 is 66.76229  
loss batch it 35 is 61.05825  
loss batch it 36 is 74.00108  
loss batch it 37 is 92.45108  
loss batch it 38 is 71.27360  
loss batch it 39 is 66.88109  
loss batch it 40 is 61.35314  
loss batch it 41 is 54.62311  
loss batch it 42 is 55.64886  
loss batch it 43 is 58.03754  
loss batch it 44 is 59.51264  
loss batch it 45 is 56.87920  
loss batch it 46 is 51.32384  
loss batch it 47 is 53.08141  
loss batch it 48 is 51.92945  
loss batch it 49 is 44.09789  
loss batch it 50 is 42.71856  
loss batch it 51 is 48.42256  
loss batch it 52 is 46.09244  
loss batch it 53 is 48.07730  
loss batch it 54 is 53.10695  
loss batch it 55 is 41.37045  
loss batch it 56 is 43.26941  
loss batch it 57 is 38.72185  
loss batch it 58 is 42.34612  
loss batch it 59 is 40.46330  
loss batch it 60 is 40.99585  
loss batch it 61 is 40.22071  
loss batch it 62 is 38.56925  
loss batch it 63 is 43.10324  
loss batch it 64 is 49.29799  
loss batch it 65 is 43.73853  
loss batch it 66 is 40.63553  
loss batch it 67 is 41.70190  
loss batch it 68 is 39.14394  
loss batch it 69 is 38.10758  
loss batch it 70 is 33.20763  
loss batch it 71 is 37.57956  
loss batch it 72 is 31.98932  
loss batch it 73 is 35.66661  
loss batch it 74 is 39.29317  
loss batch it 75 is 28.49319  
loss batch it 76 is 37.29789  
loss batch it 77 is 35.15478  
loss batch it 78 is 39.21164  
loss batch it 79 is 31.88801  
loss batch it 80 is 34.59898  
loss batch it 81 is 35.92958  
loss batch it 82 is 31.19754  
loss batch it 83 is 33.24626  
loss batch it 84 is 30.74020  
loss batch it 85 is 32.44876  
loss batch it 86 is 31.55653  
loss batch it 87 is 31.86154  
loss batch it 88 is 29.94460  
loss batch it 89 is 31.42233  
loss batch it 90 is 36.01522  
loss batch it 91 is 29.38017  
loss batch it 92 is 31.76333  
loss batch it 93 is 25.45685  
loss batch it 94 is 29.32216  
loss batch it 95 is 36.98973  
loss batch it 96 is 29.34196  
loss batch it 97 is 41.98963  
loss batch it 98 is 32.17106  
loss batch it 99 is 26.98546  
loss batch it 100 is 28.37156  
loss batch it 101 is 28.92695  
loss batch it 102 is 27.02417  
loss batch it 103 is 23.03618  
loss batch it 104 is 24.58759  
loss batch it 105 is 29.87956  
loss batch it 106 is 28.51639  
loss batch it 107 is 26.75951  
loss batch it 108 is 25.40937  
loss batch it 109 is 25.17841  
loss batch it 110 is 26.61622  
loss batch it 111 is 24.79905  
loss batch it 112 is 26.10138  
loss batch it 113 is 24.60226  
loss batch it 114 is 28.58419  
loss batch it 115 is 23.20041  
loss batch it 116 is 24.55980  
loss batch it 117 is 24.95312  
loss batch it 118 is 20.60688  
loss batch it 119 is 23.41150  
loss batch it 120 is 23.27147  
loss batch it 121 is 20.39636  
loss batch it 122 is 21.30862  
loss batch it 123 is 21.62584  
loss batch it 124 is 24.48325  
loss batch it 125 is 23.73712  
loss batch it 126 is 22.80413  
loss batch it 127 is 21.86084  
loss batch it 128 is 20.97674  
loss batch it 129 is 28.21165  
loss batch it 130 is 29.91176  
loss batch it 131 is 27.38731  
loss batch it 132 is 23.02882  
loss batch it 133 is 27.88765  
loss batch it 134 is 35.15747  
loss batch it 135 is 26.41485  
loss batch it 136 is 39.13646  
loss batch it 137 is 38.03997  
loss batch it 138 is 58.86914  
loss batch it 139 is 26.47685  
loss batch it 140 is 18.94923  
loss batch it 141 is 25.53605  
loss batch it 142 is 26.51744  
loss batch it 143 is 23.45871  
loss batch it 144 is 20.02329  
loss batch it 145 is 21.71984  
loss batch it 146 is 18.88279  
loss batch it 147 is 19.16609  
loss batch it 148 is 17.70323  
loss batch it 149 is 21.68288  
loss batch it 150 is 21.52621  
loss batch it 151 is 20.18753  
loss batch it 152 is 17.78029  
loss batch it 153 is 19.20727  
loss batch it 154 is 20.94039  
loss batch it 155 is 18.18733  
loss batch it 156 is 23.56662  
loss batch it 157 is 19.39461  
loss batch it 158 is 19.88085  
loss batch it 159 is 20.17317  
loss batch it 160 is 25.87083  
loss batch it 161 is 21.16114  
loss batch it 162 is 15.78042  
loss batch it 163 is 19.04870  
loss batch it 164 is 18.24196  
loss batch it 165 is 17.42711  
loss batch it 166 is 16.45247  
loss batch it 167 is 16.89205  
loss batch it 168 is 15.86429  
loss batch it 169 is 15.71264  
loss batch it 170 is 21.46440  
loss batch it 171 is 19.13174  
loss batch it 172 is 23.78532  
loss batch it 173 is 18.74704  
loss batch it 174 is 18.62149  
loss batch it 175 is 19.06637  
loss batch it 176 is 24.41806  
loss batch it 177 is 22.16565  
loss batch it 178 is 17.34184  
loss batch it 179 is 19.00205  
loss batch it 180 is 19.50864  
loss batch it 181 is 19.84567  
loss batch it 182 is 20.25270  
loss batch it 183 is 16.41031  
loss batch it 184 is 15.83098  
loss batch it 185 is 16.44703  
loss batch it 186 is 18.35921  
loss batch it 187 is 15.85541  
loss batch it 188 is 14.81808  
loss batch it 189 is 16.09051  
loss batch it 190 is 15.15237  
loss batch it 191 is 15.17239  
loss batch it 192 is 13.69260  
loss batch it 193 is 15.35709  
loss batch it 194 is 14.75997  
loss batch it 195 is 12.45158  
loss batch it 196 is 17.94584  
loss batch it 197 is 17.57036  
loss batch it 198 is 18.38358  
loss batch it 199 is 17.90710  
loss batch it 200 is 17.46078  
loss batch it 201 is 16.67033  
loss batch it 202 is 17.71371  
loss batch it 203 is 17.26128  
loss batch it 204 is 17.92533  
loss batch it 205 is 14.68049  
loss batch it 206 is 14.64708  
loss batch it 207 is 11.78608  
loss batch it 208 is 12.54588  
loss batch it 209 is 12.67814  
loss batch it 210 is 16.87972  
loss batch it 211 is 17.07954  
loss batch it 212 is 20.93736  
loss batch it 213 is 23.07738  
loss batch it 214 is 19.67770  
loss batch it 215 is 16.88903  
loss batch it 216 is 16.73918  
loss batch it 217 is 17.27884  
loss batch it 218 is 15.96084  
loss batch it 219 is 12.37597  
loss batch it 220 is 14.65746  
loss batch it 221 is 15.49700  
loss batch it 222 is 18.64174  
loss batch it 223 is 16.06863  
loss batch it 224 is 13.85552  
loss batch it 225 is 20.28049  
loss batch it 226 is 14.59852  
loss batch it 227 is 11.82384  
loss batch it 228 is 10.95117  
loss batch it 229 is 12.80226  
loss batch it 230 is 12.59933  
loss batch it 231 is 12.12512  
loss batch it 232 is 17.19498  
loss batch it 233 is 12.02704  
loss batch it 234 is 15.44988  
loss batch it 235 is 11.92048  
loss batch it 236 is 11.35487  
loss batch it 237 is 7.65519  
loss batch it 238 is 12.50121  
loss batch it 239 is 11.68167  
loss batch it 240 is 14.83444  
loss batch it 241 is 18.83118  
loss batch it 242 is 15.21333  
loss batch it 243 is 12.06831  
loss batch it 244 is 12.98625  
loss batch it 245 is 12.90747  
loss batch it 246 is 16.49833  
loss batch it 247 is 14.24296  
loss batch it 248 is 16.18800  
loss batch it 249 is 16.18693  
loss batch it 250 is 15.90659  
loss batch it 251 is 14.28231  
loss batch it 252 is 16.12292  
loss batch it 253 is 15.46233  
loss batch it 254 is 14.09233  
loss batch it 255 is 18.88695  
loss batch it 256 is 19.96766  
loss batch it 257 is 13.31786  
loss batch it 258 is 12.78397  
loss batch it 259 is 18.20739  
loss batch it 260 is 16.68514  
loss batch it 261 is 20.80042  
loss batch it 262 is 21.17194  
loss batch it 263 is 77.38215  
loss batch it 264 is 11.54424  
loss batch it 265 is 17.09915  
loss batch it 266 is 16.29238  
loss batch it 267 is 14.68542  
loss batch it 268 is 16.50939  
loss batch it 269 is 14.73584  
loss batch it 270 is 15.07107  
loss batch it 271 is 10.84853  
loss batch it 272 is 16.36086  
loss batch it 273 is 12.09938  
loss batch it 274 is 11.82972  
loss batch it 275 is 11.10136  
loss batch it 276 is 10.56245  
loss batch it 277 is 11.54123  
loss batch it 278 is 9.30022  
loss batch it 279 is 12.71163  
loss batch it 280 is 13.39298  
loss batch it 281 is 14.02941  
loss batch it 282 is 9.90559  
loss batch it 283 is 14.64041  
loss batch it 284 is 13.73622  
loss batch it 285 is 11.57052  
loss batch it 286 is 17.05399  
loss batch it 287 is 15.65322  
loss batch it 288 is 9.43672  
loss batch it 289 is 8.80005  
loss batch it 290 is 13.85168  
loss batch it 291 is 10.78211  
loss batch it 292 is 14.10669  
loss batch it 293 is 13.79233  
loss batch it 294 is 12.02435  
loss batch it 295 is 11.00671  
loss batch it 296 is 10.99632  
loss batch it 297 is 13.34615  
loss batch it 298 is 16.51041  
loss batch it 299 is 13.30330  
loss batch it 300 is 13.73232  
loss batch it 301 is 19.05601  
loss batch it 302 is 13.32664  
loss batch it 303 is 14.82320  
loss batch it 304 is 12.77784  
loss batch it 305 is 12.09181  
loss batch it 306 is 11.43948  
loss batch it 307 is 12.02935  
loss batch it 308 is 11.05822  
loss batch it 309 is 10.74550  
loss batch it 310 is 12.78724  
loss batch it 311 is 12.54898  
loss batch it 312 is 9.72425  
loss batch it 313 is 13.29356  
loss batch it 314 is 11.79684  
loss batch it 315 is 10.22488  
loss batch it 316 is 9.64072  
loss batch it 317 is 8.71790  
loss batch it 318 is 11.07276  
loss batch it 319 is 9.80517  
loss batch it 320 is 11.99988  
loss batch it 321 is 11.20971  
loss batch it 322 is 10.75307  
loss batch it 323 is 11.04849  
loss batch it 324 is 10.07648  
loss batch it 325 is 16.04034  
loss batch it 326 is 18.20913  
loss batch it 327 is 13.02055  
loss batch it 328 is 12.62633  
loss batch it 329 is 12.60761  
loss batch it 330 is 12.54176  
loss batch it 331 is 13.34200  
loss batch it 332 is 14.44853  
loss batch it 333 is 12.75510  
loss batch it 334 is 13.39214  
loss batch it 335 is 11.53017  
loss batch it 336 is 29.61502  
loss batch it 337 is 11.29967  
loss batch it 338 is 12.47115  
loss batch it 339 is 8.27373  
loss batch it 340 is 9.74421  
loss batch it 341 is 11.07387  
loss batch it 342 is 10.01485  
loss batch it 343 is 21.02715  
loss batch it 344 is 15.31482  
loss batch it 345 is 13.82732  
loss batch it 346 is 9.56860  
loss batch it 347 is 12.53812  
loss batch it 348 is 11.00505  
loss batch it 349 is 14.97185  
loss batch it 350 is 8.69174  
loss batch it 351 is 12.00899  
loss batch it 352 is 9.18408  
loss batch it 353 is 11.17735  
loss batch it 354 is 10.78451  
loss batch it 355 is 8.62953  
loss batch it 356 is 9.93869  
loss batch it 357 is 11.51861  
loss batch it 358 is 8.73094  
loss batch it 359 is 12.66300  
loss batch it 360 is 10.08922  
loss batch it 361 is 10.27706  
loss batch it 362 is 8.81519  
loss batch it 363 is 10.55221  
loss batch it 364 is 10.69351  
loss batch it 365 is 14.51502  
loss batch it 366 is 14.68783  
loss batch it 367 is 9.58140  
loss batch it 368 is 8.38127  
loss batch it 369 is 9.53692  
loss batch it 370 is 9.91588  
loss batch it 371 is 10.09877  
loss batch it 372 is 10.39073  
loss batch it 373 is 9.39728  
loss batch it 374 is 10.96836  
loss batch it 375 is 9.49568  
loss batch it 376 is 8.37630  
loss batch it 377 is 8.04678  
loss batch it 378 is 8.44243  
loss batch it 379 is 10.27540  
loss batch it 380 is 9.15629  
loss batch it 381 is 11.13985  
loss batch it 382 is 9.49365  
loss batch it 383 is 8.98770  
loss batch it 384 is 9.69727  
loss batch it 385 is 16.22311  
loss batch it 386 is 9.17900  
loss batch it 387 is 9.05502  
loss batch it 388 is 9.09955  
loss batch it 389 is 9.88685  
loss batch it 390 is 8.54858  
loss batch it 391 is 12.05929  
loss batch it 392 is 9.55550  
loss batch it 393 is 10.96100  
loss batch it 394 is 11.44264  
loss batch it 395 is 13.65880  
loss batch it 396 is 11.71145  
loss batch it 397 is 10.53178  
loss batch it 398 is 10.71710  
loss batch it 399 is 9.92533  
loss batch it 400 is 9.47252  
loss batch it 401 is 12.51796  
loss batch it 402 is 12.53989  
loss batch it 403 is 10.25783  
loss batch it 404 is 12.40407  
loss batch it 405 is 13.86838  
loss batch it 406 is 10.00240  
loss batch it 407 is 12.14268  
loss batch it 408 is 11.25349  
loss batch it 409 is 11.80318  
loss batch it 410 is 9.35742  
loss batch it 411 is 8.24564  
loss batch it 412 is 12.57483  
loss batch it 413 is 6.21431  
loss batch it 414 is 9.25229  
loss batch it 415 is 10.14990  
loss batch it 416 is 16.17402  
loss batch it 417 is 9.47159  
loss batch it 418 is 8.94519  
loss batch it 419 is 11.17743  
loss batch it 420 is 10.46294  
loss batch it 421 is 8.30767  
loss batch it 422 is 10.25363  
loss batch it 423 is 10.18073  
loss batch it 424 is 7.94903  
loss batch it 425 is 8.93771  
loss batch it 426 is 10.57582  
loss batch it 427 is 8.93668  
loss batch it 428 is 8.51122  
loss batch it 429 is 13.44404  
loss batch it 430 is 9.21845  
loss batch it 431 is 10.88426  
loss batch it 432 is 7.75547  
loss batch it 433 is 8.92153  
loss batch it 434 is 8.65666  
loss batch it 435 is 12.52164  
loss batch it 436 is 7.99296  
loss batch it 437 is 7.28957  
loss batch it 438 is 8.64715  
loss batch it 439 is 8.48977  
loss batch it 440 is 9.22625  
loss batch it 441 is 8.83961  
loss batch it 442 is 8.32116  
loss batch it 443 is 9.20234  
loss batch it 444 is 11.11722  
loss batch it 445 is 9.34054  
loss batch it 446 is 12.00647  
loss batch it 447 is 7.45751  
loss batch it 448 is 12.91064  
loss batch it 449 is 9.44721  
loss batch it 450 is 8.79545  
loss batch it 451 is 8.25744  
loss batch it 452 is 10.16473  
loss batch it 453 is 11.07540  
loss batch it 454 is 8.74304  
loss batch it 455 is 8.16129  
loss batch it 456 is 9.68068  
loss batch it 457 is 14.75784  
loss batch it 458 is 10.29521  
loss batch it 459 is 12.25242  
loss batch it 460 is 14.57144  
loss batch it 461 is 11.50669  
loss batch it 462 is 7.81191  
loss batch it 463 is 11.35629  
loss batch it 464 is 9.54150  
loss batch it 465 is 9.39928  
loss batch it 466 is 8.27188  
loss batch it 467 is 7.34879  
loss batch it 468 is 8.24450  
loss batch it 469 is 8.02849  
loss batch it 470 is 10.78988  
loss batch it 471 is 10.40357  
loss batch it 472 is 11.79144  
loss batch it 473 is 11.57728  
loss batch it 474 is 8.07297  
loss batch it 475 is 11.92951  
loss batch it 476 is 11.34464  
loss batch it 477 is 8.24691  
loss batch it 478 is 10.71821  
loss batch it 479 is 9.66746  
loss batch it 480 is 7.44475  
loss batch it 481 is 11.76942  
loss batch it 482 is 9.12698  
loss batch it 483 is 8.67940  
loss batch it 484 is 8.45521  
loss batch it 485 is 7.94629  
loss batch it 486 is 10.74617  
loss batch it 487 is 9.43706  
loss batch it 488 is 9.60433  
loss batch it 489 is 8.11700  
loss batch it 490 is 8.61518  
loss batch it 491 is 7.16706  
loss batch it 492 is 6.76352  
loss batch it 493 is 8.54750  
loss batch it 494 is 8.50683  
loss batch it 495 is 9.15018  
loss batch it 496 is 6.87813  
loss batch it 497 is 7.66154  
loss batch it 498 is 7.95551  
loss batch it 499 is 7.07330  
loss batch it 500 is 6.96843  
loss batch it 501 is 7.56444  
loss batch it 502 is 6.65355  
loss batch it 503 is 7.10441  
loss batch it 504 is 6.63559  
loss batch it 505 is 14.31892  
loss batch it 506 is 9.68791  
loss batch it 507 is 6.43823  
loss batch it 508 is 8.15130  
loss batch it 509 is 6.72845  
loss batch it 510 is 8.13237  
loss batch it 511 is 6.91744  
loss batch it 512 is 9.73060  
loss batch it 513 is 8.09655  
loss batch it 514 is 10.35647  
loss batch it 515 is 6.29350  
loss batch it 516 is 9.41921  
loss batch it 517 is 7.91985  
loss batch it 518 is 7.91887  
loss batch it 519 is 13.79007  
loss batch it 520 is 11.38604  
loss batch it 521 is 13.42225  
loss batch it 522 is 8.43953  
loss batch it 523 is 8.93966  
loss batch it 524 is 12.34899  
loss batch it 525 is 9.38851  
loss batch it 526 is 7.37536  
loss batch it 527 is 7.42793  
loss batch it 528 is 11.01891  
loss batch it 529 is 7.03595  
loss batch it 530 is 9.79636  
loss batch it 531 is 8.47890  
loss batch it 532 is 8.49056  
loss batch it 533 is 8.76555  
loss batch it 534 is 7.87971  
loss batch it 535 is 9.46563  
loss batch it 536 is 7.70441  
loss batch it 537 is 7.07831  
loss batch it 538 is 9.62633  
loss batch it 539 is 10.96468  
loss batch it 540 is 10.40391  
loss batch it 541 is 6.55271  
loss batch it 542 is 8.10938  
loss batch it 543 is 9.52304  
loss batch it 544 is 7.81131  
loss batch it 545 is 9.73355  
loss batch it 546 is 9.56019  
loss batch it 547 is 8.43753  
loss batch it 548 is 8.01329  
loss batch it 549 is 10.92544  
loss batch it 550 is 13.10058  
loss batch it 551 is 7.51061  
loss batch it 552 is 6.22158  
loss batch it 553 is 9.34587  
loss batch it 554 is 8.31969  
loss batch it 555 is 10.99627  
loss batch it 556 is 9.23418  
loss batch it 557 is 7.65522  
loss batch it 558 is 9.13332  
loss batch it 559 is 10.00177  
loss batch it 560 is 9.62123  
loss batch it 561 is 7.28883  
loss batch it 562 is 8.99247  
loss batch it 563 is 6.87232  
loss batch it 564 is 8.00274  
loss batch it 565 is 9.50704  
loss batch it 566 is 10.09281  
loss batch it 567 is 9.93748  
loss batch it 568 is 7.53338  
loss batch it 569 is 7.12426  
loss batch it 570 is 7.19046  
loss batch it 571 is 6.80267  
loss batch it 572 is 6.53964  
loss batch it 573 is 5.17227  
loss batch it 574 is 7.45937  
loss batch it 575 is 6.32757  
loss batch it 576 is 6.75317  
loss batch it 577 is 8.54197  
loss batch it 578 is 8.49971  
loss batch it 579 is 8.28329  
loss batch it 580 is 11.38855  
loss batch it 581 is 11.21050  
loss batch it 582 is 5.58261  
loss batch it 583 is 6.90181  
loss batch it 584 is 5.88482  
loss batch it 585 is 6.17415  
loss batch it 586 is 6.23582  
loss batch it 587 is 6.27557  
loss batch it 588 is 5.76509  
loss batch it 589 is 7.08143  
loss batch it 590 is 6.63617  
loss batch it 591 is 8.77126  
loss batch it 592 is 10.71196  
loss batch it 593 is 7.42774  
loss batch it 594 is 9.57762  
loss batch it 595 is 8.20879  
loss batch it 596 is 6.32316  
loss batch it 597 is 14.55286  
loss batch it 598 is 8.29068  
loss batch it 599 is 8.84327  
loss batch it 600 is 8.62878  
loss batch it 601 is 12.51262  
loss batch it 602 is 6.58335  
loss batch it 603 is 8.99049  
loss batch it 604 is 12.81497  
loss batch it 605 is 13.00368  
loss batch it 606 is 9.45007  
loss batch it 607 is 7.41933  
loss batch it 608 is 8.39756  
loss batch it 609 is 8.25145  
loss batch it 610 is 11.50063  
loss batch it 611 is 8.87465  
loss batch it 612 is 8.40245  
loss batch it 613 is 8.03988  
loss batch it 614 is 9.96969  
loss batch it 615 is 7.48198  
loss batch it 616 is 6.96351  
loss batch it 617 is 7.77158  
loss batch it 618 is 8.53679  
loss batch it 619 is 9.33654  
loss batch it 620 is 12.41860  
loss batch it 621 is 9.15942  
loss batch it 622 is 8.19638  
loss batch it 623 is 6.36401  
loss batch it 624 is 6.57269  
loss batch it 625 is 8.13787  
loss batch it 626 is 8.14976  
loss batch it 627 is 5.93859  
loss batch it 628 is 8.13351  
loss batch it 629 is 8.65775  
loss batch it 630 is 12.63371  
loss batch it 631 is 8.48913  
loss batch it 632 is 6.55901  
loss batch it 633 is 6.37704  
loss batch it 634 is 9.90445  
loss batch it 635 is 5.79657  
loss batch it 636 is 8.14494  
loss batch it 637 is 5.32637  
loss batch it 638 is 5.59904  
loss batch it 639 is 8.56188  
loss batch it 640 is 5.74957  
loss batch it 641 is 6.15664  
loss batch it 642 is 6.60095  
loss batch it 643 is 7.33571  
loss batch it 644 is 6.83261  
loss batch it 645 is 9.89835  
loss batch it 646 is 8.42789  
loss batch it 647 is 7.49953  
loss batch it 648 is 4.98661  
loss batch it 649 is 8.36603  
loss batch it 650 is 5.64531  
loss batch it 651 is 4.86945  
loss batch it 652 is 6.05700  
loss batch it 653 is 6.51375  
loss batch it 654 is 5.94830  
loss batch it 655 is 7.00786  
loss batch it 656 is 7.45974  
loss batch it 657 is 5.53535  
loss batch it 658 is 9.34278  
loss batch it 659 is 6.20131  
loss batch it 660 is 6.44471  
loss batch it 661 is 5.22377  
loss batch it 662 is 7.23526  
loss batch it 663 is 6.22709  
loss batch it 664 is 7.25731  
loss batch it 665 is 7.93233  
loss batch it 666 is 7.81218  
loss batch it 667 is 6.61355  
loss batch it 668 is 5.83328  
loss batch it 669 is 5.17983  
loss batch it 670 is 6.51652  
loss batch it 671 is 7.10248  
loss batch it 672 is 5.10920  
loss batch it 673 is 6.56672  
loss batch it 674 is 8.06344  
loss batch it 675 is 10.11578  
loss batch it 676 is 6.97912  
loss batch it 677 is 5.77336  
loss batch it 678 is 4.98305  
loss batch it 679 is 5.17860  
loss batch it 680 is 6.29316  
loss batch it 681 is 6.65637  
loss batch it 682 is 4.98637  
loss batch it 683 is 6.08788  
loss batch it 684 is 5.60216  
loss batch it 685 is 10.06820  
loss batch it 686 is 5.77859  
loss batch it 687 is 6.14079  
loss batch it 688 is 8.38408  
loss batch it 689 is 7.02037  
loss batch it 690 is 5.52027  
loss batch it 691 is 6.79521  
loss batch it 692 is 8.16587  
loss batch it 693 is 4.44874  
loss batch it 694 is 5.51228  
loss batch it 695 is 4.83856  
loss batch it 696 is 4.77146  
loss batch it 697 is 5.62820  
loss batch it 698 is 4.75237  
loss batch it 699 is 5.45741  
loss batch it 700 is 7.33474  
loss batch it 701 is 5.68616  
loss batch it 702 is 4.82073  
loss batch it 703 is 4.76467  
loss batch it 704 is 6.81301  
loss batch it 705 is 5.51069  
loss batch it 706 is 5.93788  
loss batch it 707 is 4.60616  
loss batch it 708 is 5.20876  
loss batch it 709 is 6.14523  
loss batch it 710 is 5.78577  
loss batch it 711 is 7.36933  
loss batch it 712 is 7.83334  
loss batch it 713 is 7.17009  
loss batch it 714 is 6.59390  
loss batch it 715 is 5.25756  
loss batch it 716 is 6.35187  
loss batch it 717 is 5.61588  
loss batch it 718 is 4.53924  
loss batch it 719 is 6.73320  
loss batch it 720 is 6.80492  
loss batch it 721 is 4.94104  
loss batch it 722 is 8.54830  
loss batch it 723 is 5.84919  
loss batch it 724 is 8.17328  
loss batch it 725 is 6.33492  
loss batch it 726 is 5.81120  
loss batch it 727 is 6.32434  
loss batch it 728 is 5.30224  
loss batch it 729 is 6.05007  
loss batch it 730 is 6.08772  
loss batch it 731 is 4.95568  
loss batch it 732 is 6.01336  
loss batch it 733 is 5.18688  
loss batch it 734 is 5.77565  
loss batch it 735 is 6.99194  
loss batch it 736 is 4.34326  
loss batch it 737 is 5.97257  
loss batch it 738 is 4.00189  
loss batch it 739 is 5.33672  
loss batch it 740 is 4.30217  
loss batch it 741 is 4.87859  
loss batch it 742 is 4.91969  
loss batch it 743 is 4.55930  
loss batch it 744 is 4.94910  
loss batch it 745 is 4.39912  
loss batch it 746 is 4.15833  
loss batch it 747 is 5.51615  
loss batch it 748 is 4.28279  
loss batch it 749 is 6.59327  
loss batch it 750 is 7.36820  
loss batch it 751 is 5.11996  
loss batch it 752 is 4.54415  
loss batch it 753 is 6.73215  
loss batch it 754 is 4.63954  
loss batch it 755 is 5.14473  
loss batch it 756 is 5.21052  
loss batch it 757 is 6.53821  
loss batch it 758 is 4.38644  
loss batch it 759 is 4.60377  
loss batch it 760 is 5.54638  
loss batch it 761 is 5.73406  
loss batch it 762 is 4.76866  
loss batch it 763 is 4.64012  
loss batch it 764 is 4.54552  
loss batch it 765 is 4.38381  
loss batch it 766 is 4.79163  
loss batch it 767 is 4.91675  
loss batch it 768 is 4.39227  
loss batch it 769 is 4.91638  
loss batch it 770 is 4.11416  
loss batch it 771 is 4.93446  
loss batch it 772 is 4.80305  
loss batch it 773 is 5.08012  
loss batch it 774 is 4.68390  
loss batch it 775 is 5.21447  
loss batch it 776 is 7.01811  
loss batch it 777 is 9.97948  
loss batch it 778 is 6.36499  
loss batch it 779 is 4.93178  
loss batch it 780 is 4.73404  
loss batch it 781 is 5.35165  
loss batch it 782 is 5.40787  
loss batch it 783 is 4.43724  
loss batch it 784 is 4.79773  
loss batch it 785 is 5.37940  
loss batch it 786 is 6.60024  
loss batch it 787 is 4.52494  
loss batch it 788 is 3.65199  
loss batch it 789 is 4.36986  
loss batch it 790 is 4.27844  
loss batch it 791 is 4.17199  
loss batch it 792 is 4.63691  
loss batch it 793 is 5.48722  
loss batch it 794 is 5.89093  
loss batch it 795 is 4.64235  
loss batch it 796 is 3.94953  
loss batch it 797 is 5.66319  
loss batch it 798 is 4.73533  
loss batch it 799 is 4.69402  
loss batch it 800 is 4.03463  
loss batch it 801 is 3.96906  
loss batch it 802 is 4.90865  
loss batch it 803 is 3.94030  
loss batch it 804 is 5.00567  
loss batch it 805 is 3.82701  
loss batch it 806 is 3.72168  
loss batch it 807 is 4.23582  
loss batch it 808 is 5.17767  
loss batch it 809 is 4.97170  
loss batch it 810 is 5.13860  
loss batch it 811 is 5.79757  
loss batch it 812 is 4.78068  
loss batch it 813 is 5.14948  
loss batch it 814 is 4.82118  
loss batch it 815 is 4.55298  
loss batch it 816 is 4.07319  
loss batch it 817 is 4.76391  
loss batch it 818 is 4.42749  
loss batch it 819 is 5.17523  
loss batch it 820 is 4.00336  
loss batch it 821 is 5.01460  
loss batch it 822 is 3.88336  
loss batch it 823 is 4.73953  
loss batch it 824 is 3.47875  
loss batch it 825 is 5.87535  
loss batch it 826 is 4.92544  
loss batch it 827 is 4.03018  
loss batch it 828 is 5.49051  
loss batch it 829 is 5.75341  
loss batch it 830 is 3.88963  
loss batch it 831 is 3.84885  
loss batch it 832 is 3.43958  
loss batch it 833 is 4.37822  
loss batch it 834 is 9.09048  
loss batch it 835 is 5.40116  
loss batch it 836 is 6.55946  
loss batch it 837 is 3.65212  
loss batch it 838 is 4.39078  
loss batch it 839 is 4.87334  
loss batch it 840 is 5.40767  
loss batch it 841 is 3.73857  
loss batch it 842 is 3.59829  
loss batch it 843 is 4.53533  
loss batch it 844 is 4.63634  
loss batch it 845 is 4.18687  
loss batch it 846 is 4.37358  
loss batch it 847 is 5.25125  
loss batch it 848 is 4.84102  
loss batch it 849 is 3.77782  
loss batch it 850 is 3.63409  
loss batch it 851 is 4.06037  
loss batch it 852 is 4.05024  
loss batch it 853 is 3.42201  
loss batch it 854 is 5.06551  
loss batch it 855 is 4.28964  
loss batch it 856 is 4.53898  
loss batch it 857 is 3.96703  
loss batch it 858 is 3.93250  
loss batch it 859 is 4.23991  
loss batch it 860 is 3.90430  
loss batch it 861 is 3.86059  
loss batch it 862 is 4.05648  
loss batch it 863 is 4.13993  
loss batch it 864 is 4.69288  
loss batch it 865 is 3.95499  
loss batch it 866 is 4.06287  
loss batch it 867 is 4.13934  
loss batch it 868 is 4.12906  
loss batch it 869 is 3.49508  
loss batch it 870 is 4.79297  
loss batch it 871 is 4.09103  
loss batch it 872 is 4.11708  
loss batch it 873 is 3.03573  
loss batch it 874 is 3.79532  
loss batch it 875 is 5.21054  
loss batch it 876 is 4.03887  
loss batch it 877 is 4.27451  
loss batch it 878 is 4.43048  
loss batch it 879 is 3.40053  
loss batch it 880 is 4.96924  
loss batch it 881 is 5.69763  
loss batch it 882 is 5.07649  
loss batch it 883 is 5.18846  
loss batch it 884 is 4.68804  
loss batch it 885 is 4.66401  
loss batch it 886 is 4.95053  
loss batch it 887 is 4.61898  
loss batch it 888 is 6.13275  
loss batch it 889 is 3.22974  
loss batch it 890 is 3.71128  
loss batch it 891 is 3.90521  
loss batch it 892 is 4.05738  
loss batch it 893 is 3.98834  
loss batch it 894 is 4.23731  
loss batch it 895 is 4.23923  
loss batch it 896 is 4.21606  
loss batch it 897 is 5.38526  
loss batch it 898 is 3.77388  
loss batch it 899 is 3.68508  
loss batch it 900 is 3.26205  
loss batch it 901 is 3.90731  
loss batch it 902 is 3.57280  
loss batch it 903 is 3.52264  
loss batch it 904 is 3.19412  
loss batch it 905 is 3.30440  
loss batch it 906 is 3.47194  
loss batch it 907 is 2.92764  
loss batch it 908 is 4.38072  
loss batch it 909 is 3.59207  
loss batch it 910 is 3.89600  
loss batch it 911 is 4.26731  
loss batch it 912 is 3.46826  
loss batch it 913 is 3.63386  
loss batch it 914 is 3.76312  
loss batch it 915 is 4.07602  
loss batch it 916 is 6.22074  
loss batch it 917 is 4.87073  
loss batch it 918 is 4.13841  
loss batch it 919 is 4.38743  
loss batch it 920 is 4.50848  
loss batch it 921 is 3.71006  
loss batch it 922 is 3.46945  
loss batch it 923 is 3.48225  
loss batch it 924 is 3.30328  
loss batch it 925 is 5.32921  
loss batch it 926 is 4.47384  
loss batch it 927 is 3.74645  
loss batch it 928 is 5.30258  
loss batch it 929 is 4.29041  
loss batch it 930 is 3.35557  
loss batch it 931 is 3.37997  
loss batch it 932 is 3.27451  
loss batch it 933 is 3.16878  
loss batch it 934 is 4.00898  
loss batch it 935 is 3.49984  
loss batch it 936 is 3.20796  
loss batch it 937 is 4.44344  
loss batch it 938 is 3.45142  
loss batch it 939 is 2.86221  
loss batch it 940 is 3.65254  
loss batch it 941 is 3.55604  
loss batch it 942 is 3.57794  
loss batch it 943 is 3.76384  
loss batch it 944 is 3.75729  
loss batch it 945 is 4.43781  
loss batch it 946 is 3.00459  
loss batch it 947 is 3.87855  
loss batch it 948 is 3.23385  
loss batch it 949 is 3.33138  
loss batch it 950 is 3.83250  
loss batch it 951 is 3.82917  
loss batch it 952 is 3.63260  
loss batch it 953 is 5.40711  
loss batch it 954 is 4.49905  
loss batch it 955 is 4.92909  
loss batch it 956 is 4.15877  
loss batch it 957 is 3.70038  
loss batch it 958 is 3.97313  
loss batch it 959 is 5.39959  
loss batch it 960 is 3.08318  
loss batch it 961 is 3.41759  
loss batch it 962 is 3.75109  
loss batch it 963 is 3.62566  
loss batch it 964 is 3.50565  
loss batch it 965 is 3.79347  
loss batch it 966 is 3.43727  
loss batch it 967 is 3.03493  
loss batch it 968 is 3.35128  
loss batch it 969 is 3.27311  
loss batch it 970 is 3.03642  
loss batch it 971 is 3.02349  
loss batch it 972 is 3.03765  
loss batch it 973 is 3.00985  
loss batch it 974 is 2.88277  
loss batch it 975 is 3.21626  
loss batch it 976 is 3.68586  
loss batch it 977 is 3.71506  
loss batch it 978 is 4.29231  
loss batch it 979 is 5.00852  
loss batch it 980 is 3.24536  
loss batch it 981 is 3.34044  
loss batch it 982 is 3.07745  
loss batch it 983 is 3.56566  
loss batch it 984 is 4.72170  
loss batch it 985 is 4.36731  
loss batch it 986 is 4.99566  
loss batch it 987 is 3.33128  
loss batch it 988 is 3.51385  
loss batch it 989 is 3.47872  
loss batch it 990 is 3.04758  
loss batch it 991 is 3.18064  
loss batch it 992 is 2.99314  
loss batch it 993 is 2.85235  
loss batch it 994 is 3.25152  
loss batch it 995 is 3.35058  
loss batch it 996 is 3.12686  
loss batch it 997 is 3.41406  
loss batch it 998 is 3.86423  
loss batch it 999 is 5.33594  
loss batch it 1000 is 5.35549  
loss batch it 1001 is 5.11106  
loss batch it 1002 is 5.07095  
loss batch it 1003 is 5.40212  
loss batch it 1004 is 5.27916  
loss batch it 1005 is 4.96215  
loss batch it 1006 is 4.05890  
loss batch it 1007 is 7.90679  
loss batch it 1008 is 3.70916  
loss batch it 1009 is 3.87431  
loss batch it 1010 is 4.07180  
loss batch it 1011 is 3.71665  
loss batch it 1012 is 3.68064  
loss batch it 1013 is 3.97372  
loss batch it 1014 is 4.15060  
loss batch it 1015 is 3.36026  
loss batch it 1016 is 2.67490  
loss batch it 1017 is 3.62558  
loss batch it 1018 is 3.28488  
loss batch it 1019 is 4.24363  
loss batch it 1020 is 3.02086  
loss batch it 1021 is 3.83306  
loss batch it 1022 is 3.24552  
loss batch it 1023 is 3.17816  
loss batch it 1024 is 3.36655  
loss batch it 1025 is 3.57168  
loss batch it 1026 is 2.88659  
loss batch it 1027 is 2.66319  
loss batch it 1028 is 3.39918  
loss batch it 1029 is 2.74486  
loss batch it 1030 is 4.31298  
loss batch it 1031 is 3.02365  
loss batch it 1032 is 3.33441  
loss batch it 1033 is 3.27313  
loss batch it 1034 is 4.50137  
loss batch it 1035 is 5.11491  
loss batch it 1036 is 3.18531  
loss batch it 1037 is 3.10683  
loss batch it 1038 is 2.88615  
loss batch it 1039 is 2.97396  
loss batch it 1040 is 3.25086  
loss batch it 1041 is 3.10126  
loss batch it 1042 is 2.63381  
loss batch it 1043 is 3.34904  
loss batch it 1044 is 3.27392  
loss batch it 1045 is 2.94982  
loss batch it 1046 is 3.04332  
loss batch it 1047 is 3.55572  
loss batch it 1048 is 2.93779  
loss batch it 1049 is 3.15109  
loss batch it 1050 is 2.67504  
loss batch it 1051 is 2.99331  
loss batch it 1052 is 3.05363  
loss batch it 1053 is 3.67856  
loss batch it 1054 is 2.84593  
loss batch it 1055 is 3.15205  
loss batch it 1056 is 3.25267  
loss batch it 1057 is 2.65488  
loss batch it 1058 is 2.43414  
loss batch it 1059 is 2.96187  
loss batch it 1060 is 3.07217  
loss batch it 1061 is 3.07024  
loss batch it 1062 is 3.08615  
loss batch it 1063 is 2.92373  
loss batch it 1064 is 2.85158  
loss batch it 1065 is 2.76848  
loss batch it 1066 is 2.89239  
loss batch it 1067 is 2.97935  
loss batch it 1068 is 3.32655  
loss batch it 1069 is 2.65846  
loss batch it 1070 is 2.97183  
loss batch it 1071 is 3.33498  
loss batch it 1072 is 3.13913  
loss batch it 1073 is 3.90739  
loss batch it 1074 is 3.17260  
loss batch it 1075 is 2.52896  
loss batch it 1076 is 3.29633  
loss batch it 1077 is 4.15955  
loss batch it 1078 is 4.31111  
loss batch it 1079 is 2.52215  
loss batch it 1080 is 2.91063  
loss batch it 1081 is 3.40938  
loss batch it 1082 is 3.23700  
loss batch it 1083 is 3.52275  
loss batch it 1084 is 3.95494  
loss batch it 1085 is 2.62878  
loss batch it 1086 is 2.98694  
loss batch it 1087 is 3.02195  
loss batch it 1088 is 2.22829  
loss batch it 1089 is 2.93797  
loss batch it 1090 is 3.09771  
loss batch it 1091 is 2.96641  
loss batch it 1092 is 3.60779  
loss batch it 1093 is 2.98625  
loss batch it 1094 is 2.55167  
loss batch it 1095 is 2.61976  
loss batch it 1096 is 2.57849  
loss batch it 1097 is 2.95071  
loss batch it 1098 is 2.85759  
loss batch it 1099 is 2.67043  
loss batch it 1100 is 3.35728  
loss batch it 1101 is 3.34471  
loss batch it 1102 is 3.17077  
loss batch it 1103 is 3.61397  
loss batch it 1104 is 5.22216  
loss batch it 1105 is 2.44757  
loss batch it 1106 is 2.70737  
loss batch it 1107 is 2.52326  
loss batch it 1108 is 3.25238  
loss batch it 1109 is 2.85890  
loss batch it 1110 is 2.55985  
loss batch it 1111 is 2.39264  
loss batch it 1112 is 2.74356  
loss batch it 1113 is 2.68757  
loss batch it 1114 is 2.71888  
loss batch it 1115 is 3.01423  
loss batch it 1116 is 2.62549  
loss batch it 1117 is 3.27848  
loss batch it 1118 is 2.93795  
loss batch it 1119 is 2.75913  
loss batch it 1120 is 2.74248  
loss batch it 1121 is 2.61663  
loss batch it 1122 is 3.25367  
loss batch it 1123 is 2.92990  
loss batch it 1124 is 2.38460  
loss batch it 1125 is 2.42140  
loss batch it 1126 is 2.40799  
loss batch it 1127 is 2.92628  
loss batch it 1128 is 2.75730  
loss batch it 1129 is 2.80494  
loss batch it 1130 is 2.84416  
loss batch it 1131 is 2.79042  
loss batch it 1132 is 2.81306  
loss batch it 1133 is 2.56873  
loss batch it 1134 is 2.74113  
loss batch it 1135 is 2.70558  
loss batch it 1136 is 2.82141  
loss batch it 1137 is 3.38208  
loss batch it 1138 is 2.82293  
loss batch it 1139 is 3.06651  
loss batch it 1140 is 3.54369  
loss batch it 1141 is 3.36350  
loss batch it 1142 is 2.31070  
loss batch it 1143 is 2.29416  
loss batch it 1144 is 3.26933  
loss batch it 1145 is 2.95611  
loss batch it 1146 is 2.92205  
loss batch it 1147 is 2.71630  
loss batch it 1148 is 2.32548  
loss batch it 1149 is 2.41992  
loss batch it 1150 is 2.65074  
loss batch it 1151 is 2.33443  
loss batch it 1152 is 2.77326  
loss batch it 1153 is 2.76447  
loss batch it 1154 is 2.45689  
loss batch it 1155 is 2.33649  
loss batch it 1156 is 2.64209  
loss batch it 1157 is 2.39845  
loss batch it 1158 is 2.55515  
loss batch it 1159 is 2.34855  
loss batch it 1160 is 2.91660  
loss batch it 1161 is 2.92885  
loss batch it 1162 is 2.61037  
loss batch it 1163 is 3.52852  
loss batch it 1164 is 3.83727  
loss batch it 1165 is 3.58465  
loss batch it 1166 is 3.51340  
loss batch it 1167 is 3.19035  
loss batch it 1168 is 2.73802  
loss batch it 1169 is 2.40210  
loss batch it 1170 is 2.69358  
loss batch it 1171 is 2.71093  
loss batch it 1172 is 2.53835  
loss batch it 1173 is 3.20087  
loss batch it 1174 is 2.50795  
loss batch it 1175 is 4.43096  
loss batch it 1176 is 2.72548  
loss batch it 1177 is 2.44149  
loss batch it 1178 is 2.68412  
loss batch it 1179 is 2.52517  
loss batch it 1180 is 2.52453  
loss batch it 1181 is 2.58300  
loss batch it 1182 is 2.98040  
loss batch it 1183 is 2.14712  
loss batch it 1184 is 3.20576  
loss batch it 1185 is 2.81437  
loss batch it 1186 is 3.81060  
loss batch it 1187 is 3.18598  
loss batch it 1188 is 2.42018  
loss batch it 1189 is 2.86846  
loss batch it 1190 is 2.93968  
loss batch it 1191 is 2.91923  
loss batch it 1192 is 2.95640  
loss batch it 1193 is 2.38418  
loss batch it 1194 is 2.45175  
loss batch it 1195 is 3.76974  
loss batch it 1196 is 2.99994  
loss batch it 1197 is 2.40354  
loss batch it 1198 is 2.36979  
loss batch it 1199 is 2.43901  
loss batch it 1200 is 2.43493  
loss batch it 1201 is 2.71708  
loss batch it 1202 is 3.46413  
loss batch it 1203 is 2.56074  
loss batch it 1204 is 2.35520  
loss batch it 1205 is 2.35310  
loss batch it 1206 is 4.97256  
loss batch it 1207 is 2.22998  
loss batch it 1208 is 3.46391  
loss batch it 1209 is 2.25050  
loss batch it 1210 is 2.43818  
loss batch it 1211 is 2.99276  
loss batch it 1212 is 2.79559  
loss batch it 1213 is 4.06230  
loss batch it 1214 is 2.39002  
loss batch it 1215 is 2.24011  
loss batch it 1216 is 2.93088  
loss batch it 1217 is 2.58428  
loss batch it 1218 is 2.50940  
loss batch it 1219 is 2.44761  
loss batch it 1220 is 3.05043  
loss batch it 1221 is 2.60361  
loss batch it 1222 is 3.27557  
loss batch it 1223 is 3.28426  
loss batch it 1224 is 3.16994  
loss batch it 1225 is 2.84281  
loss batch it 1226 is 2.43913  
loss batch it 1227 is 2.34793  
loss batch it 1228 is 2.24089  
loss batch it 1229 is 2.33888  
loss batch it 1230 is 2.34667  
loss batch it 1231 is 2.71542  
loss batch it 1232 is 3.11624  
loss batch it 1233 is 2.27205  
loss batch it 1234 is 2.32229  
loss batch it 1235 is 2.05984  
loss batch it 1236 is 2.02678  
loss batch it 1237 is 2.91192  
loss batch it 1238 is 2.07495  
loss batch it 1239 is 2.80362  
loss batch it 1240 is 2.42761  
loss batch it 1241 is 2.30209  
loss batch it 1242 is 2.39325  
loss batch it 1243 is 2.41736  
loss batch it 1244 is 2.19774  
loss batch it 1245 is 2.40760  
loss batch it 1246 is 2.73030  
loss batch it 1247 is 2.55275  
loss batch it 1248 is 2.96780  
loss batch it 1249 is 2.70661  
loss batch it 1250 is 2.28572  
loss batch it 1251 is 2.17678  
loss batch it 1252 is 2.35286  
loss batch it 1253 is 2.12732  
loss batch it 1254 is 2.42700  
loss batch it 1255 is 2.62472  
loss batch it 1256 is 2.57736  
loss batch it 1257 is 2.06134  
loss batch it 1258 is 2.09296  
loss batch it 1259 is 2.01952  
loss batch it 1260 is 2.81771  
loss batch it 1261 is 2.80488  
loss batch it 1262 is 2.28812  
loss batch it 1263 is 2.45356  
loss batch it 1264 is 2.78667  
loss batch it 1265 is 2.62422  
loss batch it 1266 is 2.52395  
loss batch it 1267 is 2.52827  
loss batch it 1268 is 2.14952  
loss batch it 1269 is 2.43580  
loss batch it 1270 is 2.43527  
loss batch it 1271 is 2.25482  
loss batch it 1272 is 2.14864  
loss batch it 1273 is 2.13857  
loss batch it 1274 is 2.26334  
loss batch it 1275 is 2.27695  
loss batch it 1276 is 2.34254  
loss batch it 1277 is 2.54686  
loss batch it 1278 is 3.02384  
loss batch it 1279 is 3.02942  
loss batch it 1280 is 2.38491  
loss batch it 1281 is 2.16321  
loss batch it 1282 is 2.60980  
loss batch it 1283 is 2.08604  
loss batch it 1284 is 2.48245  
loss batch it 1285 is 2.56672  
loss batch it 1286 is 2.09590  
loss batch it 1287 is 2.71799  
loss batch it 1288 is 2.35355  
loss batch it 1289 is 2.12586  
loss batch it 1290 is 2.35337  
loss batch it 1291 is 2.31910  
loss batch it 1292 is 2.33062  
loss batch it 1293 is 1.92428  
loss batch it 1294 is 2.05092  
loss batch it 1295 is 2.25574  
loss batch it 1296 is 2.17922  
loss batch it 1297 is 1.99684  
loss batch it 1298 is 2.57435  
loss batch it 1299 is 1.98943  
loss batch it 1300 is 2.21043  
loss batch it 1301 is 1.99666  
loss batch it 1302 is 2.39681  
loss batch it 1303 is 2.30188  
loss batch it 1304 is 2.02934  
loss batch it 1305 is 2.52156  
loss batch it 1306 is 2.60854  
loss batch it 1307 is 2.21545  
loss batch it 1308 is 1.91567  
loss batch it 1309 is 2.17502  
loss batch it 1310 is 2.23439  
loss batch it 1311 is 1.89805  
loss batch it 1312 is 2.66956  
loss batch it 1313 is 2.21389  
loss batch it 1314 is 2.33255  
loss batch it 1315 is 2.23356  
loss batch it 1316 is 2.21797  
loss batch it 1317 is 2.21263  
loss batch it 1318 is 2.66142  
loss batch it 1319 is 2.18657  
loss batch it 1320 is 2.60346  
loss batch it 1321 is 2.51025  
loss batch it 1322 is 2.20807  
loss batch it 1323 is 2.15509  
loss batch it 1324 is 2.15700  
loss batch it 1325 is 2.35550  
loss batch it 1326 is 3.69932  
loss batch it 1327 is 3.60505  
loss batch it 1328 is 2.12754  
loss batch it 1329 is 3.12369  
loss batch it 1330 is 2.32093  
loss batch it 1331 is 2.47361  
loss batch it 1332 is 2.32417  
loss batch it 1333 is 2.00152  
loss batch it 1334 is 2.04114  
loss batch it 1335 is 1.98358  
loss batch it 1336 is 2.45778  
loss batch it 1337 is 2.02912  
loss batch it 1338 is 2.24856  
loss batch it 1339 is 1.92156  
loss batch it 1340 is 2.12149  
loss batch it 1341 is 2.35941  
loss batch it 1342 is 3.19200  
loss batch it 1343 is 2.40124  
loss batch it 1344 is 1.93094  
loss batch it 1345 is 2.25293  
loss batch it 1346 is 2.14453  
loss batch it 1347 is 2.34091  
loss batch it 1348 is 3.48790  
loss batch it 1349 is 1.93112  
loss batch it 1350 is 3.40448  
loss batch it 1351 is 1.96065  
loss batch it 1352 is 2.18645  
loss batch it 1353 is 2.38652  
loss batch it 1354 is 2.44736  
loss batch it 1355 is 2.15929  
loss batch it 1356 is 3.20339  
loss batch it 1357 is 1.99093  
loss batch it 1358 is 2.29901  
loss batch it 1359 is 2.22391  
loss batch it 1360 is 2.52140  
loss batch it 1361 is 2.58061  
loss batch it 1362 is 2.06963  
loss batch it 1363 is 2.01842  
loss batch it 1364 is 2.19316  
loss batch it 1365 is 2.20101  
loss batch it 1366 is 2.11518  
loss batch it 1367 is 1.96705  
loss batch it 1368 is 2.50558  
loss batch it 1369 is 1.92856  
loss batch it 1370 is 2.20879  
loss batch it 1371 is 2.88173  
loss batch it 1372 is 2.12893  
loss batch it 1373 is 2.10746  
loss batch it 1374 is 2.34193  
loss batch it 1375 is 2.69496  
loss batch it 1376 is 2.06011  
loss batch it 1377 is 2.43719  
loss batch it 1378 is 1.70998  
loss batch it 1379 is 2.52888  
loss batch it 1380 is 2.11232  
loss batch it 1381 is 2.18240  
loss batch it 1382 is 2.19481  
loss batch it 1383 is 2.70350  
loss batch it 1384 is 2.11140  
loss batch it 1385 is 2.18257  
loss batch it 1386 is 1.95364  
loss batch it 1387 is 2.06972  
loss batch it 1388 is 3.05833  
loss batch it 1389 is 2.37164  
loss batch it 1390 is 2.01884  
loss batch it 1391 is 2.12310  
loss batch it 1392 is 2.42993  
loss batch it 1393 is 2.22242  
loss batch it 1394 is 2.51216  
loss batch it 1395 is 2.93830  
loss batch it 1396 is 2.56155  
loss batch it 1397 is 2.47896  
loss batch it 1398 is 2.02753  
loss batch it 1399 is 2.54161  
loss batch it 1400 is 2.38141  
loss batch it 1401 is 2.54513  
loss batch it 1402 is 2.27872  
loss batch it 1403 is 1.83629  
loss batch it 1404 is 1.85133  
loss batch it 1405 is 1.87624  
loss batch it 1406 is 1.82806  
loss batch it 1407 is 1.85198  
loss batch it 1408 is 2.09534  
loss batch it 1409 is 3.35448  
loss batch it 1410 is 3.04016  
loss batch it 1411 is 1.90227  
loss batch it 1412 is 1.90341  
loss batch it 1413 is 2.61958  
loss batch it 1414 is 2.12348  
loss batch it 1415 is 1.95345  
loss batch it 1416 is 1.94159  
loss batch it 1417 is 2.68151  
loss batch it 1418 is 3.21381  
loss batch it 1419 is 2.33926  
loss batch it 1420 is 2.04059  
loss batch it 1421 is 2.19380  
loss batch it 1422 is 1.99719  
loss batch it 1423 is 1.89579  
loss batch it 1424 is 2.04188  
loss batch it 1425 is 2.20824  
loss batch it 1426 is 2.07916  
loss batch it 1427 is 2.28983  
loss batch it 1428 is 2.78695  
loss batch it 1429 is 2.34072  
loss batch it 1430 is 2.85646  
loss batch it 1431 is 3.30779  
loss batch it 1432 is 1.93565  
loss batch it 1433 is 3.12749  
loss batch it 1434 is 1.92680  
loss batch it 1435 is 1.80788  
loss batch it 1436 is 1.96673  
loss batch it 1437 is 2.72016  
loss batch it 1438 is 1.91021  
loss batch it 1439 is 1.85270  
loss batch it 1440 is 1.66862  
loss batch it 1441 is 1.74624  
loss batch it 1442 is 1.99831  
loss batch it 1443 is 1.82458  
loss batch it 1444 is 1.78131  
loss batch it 1445 is 1.78360  
loss batch it 1446 is 1.66129  
loss batch it 1447 is 2.07528  
loss batch it 1448 is 1.78503  
loss batch it 1449 is 2.16968  
loss batch it 1450 is 1.55276  
loss batch it 1451 is 1.66183  
loss batch it 1452 is 1.93340  
loss batch it 1453 is 1.95713  
loss batch it 1454 is 1.82291  
loss batch it 1455 is 2.20265  
loss batch it 1456 is 2.57120  
loss batch it 1457 is 1.93778  
loss batch it 1458 is 2.19794  
loss batch it 1459 is 2.40660  
loss batch it 1460 is 2.38295  
loss batch it 1461 is 2.05186  
loss batch it 1462 is 2.84501  
loss batch it 1463 is 2.06068  
loss batch it 1464 is 1.80817  
loss batch it 1465 is 1.88952  
loss batch it 1466 is 1.84035  
loss batch it 1467 is 2.02351  
loss batch it 1468 is 1.72396  
loss batch it 1469 is 1.71877  
loss batch it 1470 is 2.48044  
loss batch it 1471 is 1.88660  
loss batch it 1472 is 1.92992  
loss batch it 1473 is 1.80792  
loss batch it 1474 is 2.37483  
loss batch it 1475 is 3.56553  
loss batch it 1476 is 1.59331  
loss batch it 1477 is 1.78504  
loss batch it 1478 is 1.73637  
loss batch it 1479 is 2.29677  
loss batch it 1480 is 2.12425  
loss batch it 1481 is 1.58983  
loss batch it 1482 is 1.90192  
loss batch it 1483 is 2.16166  
loss batch it 1484 is 1.96004  
loss batch it 1485 is 2.53334  
loss batch it 1486 is 3.65981  
loss batch it 1487 is 1.93927  
loss batch it 1488 is 2.12939  
loss batch it 1489 is 2.11628  
loss batch it 1490 is 2.42161  
loss batch it 1491 is 2.00370  
loss batch it 1492 is 2.40744  
loss batch it 1493 is 2.04052  
loss batch it 1494 is 1.67209  
loss batch it 1495 is 1.60920  
loss batch it 1496 is 1.69860  
loss batch it 1497 is 1.72270  
loss batch it 1498 is 1.58132  
loss batch it 1499 is 2.01065  
loss batch it 1500 is 1.95517  
loss batch it 1501 is 2.08151  
loss batch it 1502 is 2.09454  
loss batch it 1503 is 1.70640  
loss batch it 1504 is 1.85283  
loss batch it 1505 is 1.82965  
loss batch it 1506 is 1.85465  
loss batch it 1507 is 1.61258  
loss batch it 1508 is 1.89843  
loss batch it 1509 is 1.57377  
loss batch it 1510 is 1.70689  
loss batch it 1511 is 1.82152  
loss batch it 1512 is 1.88879  
loss batch it 1513 is 1.77430  
loss batch it 1514 is 1.60160  
loss batch it 1515 is 1.88823  
loss batch it 1516 is 1.75534  
loss batch it 1517 is 1.81131  
loss batch it 1518 is 1.65683  
loss batch it 1519 is 2.11845  
loss batch it 1520 is 1.98188  
loss batch it 1521 is 1.83974  
loss batch it 1522 is 1.76147  
loss batch it 1523 is 1.65059  
loss batch it 1524 is 1.83226  
loss batch it 1525 is 3.20641  
loss batch it 1526 is 2.49452  
loss batch it 1527 is 2.15368  
loss batch it 1528 is 2.16553  
loss batch it 1529 is 1.76730  
loss batch it 1530 is 1.81086  
loss batch it 1531 is 1.64705  
loss batch it 1532 is 1.60373  
loss batch it 1533 is 1.67974  
loss batch it 1534 is 1.91657  
loss batch it 1535 is 1.91480  
loss batch it 1536 is 1.37377  
loss batch it 1537 is 1.68915  
loss batch it 1538 is 1.64348  
loss batch it 1539 is 1.97745  
loss batch it 1540 is 2.94858  
loss batch it 1541 is 2.02245  
loss batch it 1542 is 1.99533  
loss batch it 1543 is 2.08884  
loss batch it 1544 is 2.27565  
loss batch it 1545 is 1.98087  
loss batch it 1546 is 1.53897  
loss batch it 1547 is 1.77279  
loss batch it 1548 is 1.67667  
loss batch it 1549 is 1.57702  
loss batch it 1550 is 1.60051  
loss batch it 1551 is 1.75469  
loss batch it 1552 is 2.11865  
loss batch it 1553 is 2.39268  
loss batch it 1554 is 1.94647  
loss batch it 1555 is 2.03409  
loss batch it 1556 is 1.79805  
loss batch it 1557 is 2.30164  
loss batch it 1558 is 1.80371  
loss batch it 1559 is 2.51794  
loss batch it 1560 is 2.17050  
loss batch it 1561 is 1.67964  
loss batch it 1562 is 1.78502  
loss batch it 1563 is 2.02418  
loss batch it 1564 is 1.55371  
loss batch it 1565 is 1.65415  
loss batch it 1566 is 2.57946  
loss batch it 1567 is 1.64237  
loss batch it 1568 is 1.52548  
loss batch it 1569 is 1.59340  
loss batch it 1570 is 1.81489  
loss batch it 1571 is 1.90073  
loss batch it 1572 is 1.62567  
loss batch it 1573 is 1.60404  
loss batch it 1574 is 1.57380  
loss batch it 1575 is 2.20807  
loss batch it 1576 is 1.68384  
loss batch it 1577 is 2.11196  
loss batch it 1578 is 2.21674  
loss batch it 1579 is 1.74805  
loss batch it 1580 is 1.95156  
loss batch it 1581 is 2.69752  
loss batch it 1582 is 1.88576  
loss batch it 1583 is 1.76734  
loss batch it 1584 is 1.65772  
loss batch it 1585 is 1.76555  
loss batch it 1586 is 1.65521  
loss batch it 1587 is 1.64650  
loss batch it 1588 is 1.67213  
loss batch it 1589 is 1.75947  
loss batch it 1590 is 1.81578  
loss batch it 1591 is 1.68116  
loss batch it 1592 is 1.62290  
loss batch it 1593 is 1.57393  
loss batch it 1594 is 2.81838  
loss batch it 1595 is 6.21431  
loss batch it 1596 is 1.97480  
loss batch it 1597 is 2.64508  
loss batch it 1598 is 1.74184  
loss batch it 1599 is 1.67949  
loss batch it 1600 is 1.71717  
loss batch it 1601 is 1.65107  
loss batch it 1602 is 1.68004  
loss batch it 1603 is 1.74712  
loss batch it 1604 is 1.74282  
loss batch it 1605 is 2.01339  
loss batch it 1606 is 1.75834  
loss batch it 1607 is 1.59853  
loss batch it 1608 is 1.62558  
loss batch it 1609 is 1.72540  
loss batch it 1610 is 1.78118  
loss batch it 1611 is 1.89474  
loss batch it 1612 is 2.03169  
loss batch it 1613 is 2.03706  
loss batch it 1614 is 1.89513  
loss batch it 1615 is 1.86681  
loss batch it 1616 is 2.15613  
loss batch it 1617 is 2.41118  
loss batch it 1618 is 1.68556  
loss batch it 1619 is 1.48009  
loss batch it 1620 is 1.67401  
loss batch it 1621 is 1.96542  
loss batch it 1622 is 1.66837  
loss batch it 1623 is 1.62360  
loss batch it 1624 is 1.60134  
loss batch it 1625 is 1.66122  
loss batch it 1626 is 1.50937  
loss batch it 1627 is 1.61655  
loss batch it 1628 is 1.66828  
loss batch it 1629 is 1.54366  
loss batch it 1630 is 1.55427  
loss batch it 1631 is 1.99412  
loss batch it 1632 is 1.93117  
loss batch it 1633 is 1.68141  
loss batch it 1634 is 2.06985  
loss batch it 1635 is 2.33711  
loss batch it 1636 is 2.11063  
loss batch it 1637 is 2.23739  
loss batch it 1638 is 2.14444  
loss batch it 1639 is 1.76348  
loss batch it 1640 is 1.81630  
loss batch it 1641 is 1.59035  
loss batch it 1642 is 1.82949  
loss batch it 1643 is 1.92232  
loss batch it 1644 is 1.75859  
loss batch it 1645 is 1.76362  
loss batch it 1646 is 2.54201  
loss batch it 1647 is 2.12447  
loss batch it 1648 is 2.49107  
loss batch it 1649 is 1.70600  
loss batch it 1650 is 2.63365  
loss batch it 1651 is 1.52951  
loss batch it 1652 is 2.22934  
loss batch it 1653 is 1.69599  
loss batch it 1654 is 1.78395  
loss batch it 1655 is 1.72885  
loss batch it 1656 is 1.57365  
loss batch it 1657 is 1.95469  
loss batch it 1658 is 1.77428  
loss batch it 1659 is 1.65795  
loss batch it 1660 is 1.70248  
loss batch it 1661 is 2.16342  
loss batch it 1662 is 1.53143  
loss batch it 1663 is 1.94670  
loss batch it 1664 is 1.59039  
loss batch it 1665 is 1.92458  
loss batch it 1666 is 1.54510  
loss batch it 1667 is 1.50796  
loss batch it 1668 is 1.94747  
loss batch it 1669 is 1.82466  
loss batch it 1670 is 2.09810  
loss batch it 1671 is 1.46926  
loss batch it 1672 is 1.65026  
loss batch it 1673 is 1.62848  
loss batch it 1674 is 1.92993  
loss batch it 1675 is 1.50994  
loss batch it 1676 is 1.74010  
loss batch it 1677 is 1.64022  
loss batch it 1678 is 1.70597  
loss batch it 1679 is 1.78578  
loss batch it 1680 is 1.80836  
loss batch it 1681 is 2.00957  
loss batch it 1682 is 2.38240  
loss batch it 1683 is 1.96930  
loss batch it 1684 is 1.76435  
loss batch it 1685 is 1.58757  
loss batch it 1686 is 1.84271  
loss batch it 1687 is 1.48263  
loss batch it 1688 is 1.37992  
loss batch it 1689 is 1.44575  
loss batch it 1690 is 1.76873  
loss batch it 1691 is 3.75897  
loss batch it 1692 is 1.61204  
loss batch it 1693 is 1.34679  
loss batch it 1694 is 1.52174  
loss batch it 1695 is 1.51456  
loss batch it 1696 is 1.27346  
loss batch it 1697 is 1.58920  
loss batch it 1698 is 1.39067  
loss batch it 1699 is 1.99387  
loss batch it 1700 is 1.54074  
loss batch it 1701 is 1.28713  
loss batch it 1702 is 1.51027  
loss batch it 1703 is 1.51076  
loss batch it 1704 is 1.52208  
loss batch it 1705 is 1.47993  
loss batch it 1706 is 1.68346  
loss batch it 1707 is 2.30095  
loss batch it 1708 is 2.90622  
loss batch it 1709 is 2.58670  
loss batch it 1710 is 1.39660  
loss batch it 1711 is 1.55500  
loss batch it 1712 is 2.10416  
loss batch it 1713 is 1.67942  
loss batch it 1714 is 1.62604  
loss batch it 1715 is 1.72558  
loss batch it 1716 is 1.64471  
loss batch it 1717 is 1.45583  
loss batch it 1718 is 1.86677  
loss batch it 1719 is 1.57055  
loss batch it 1720 is 1.39441  
loss batch it 1721 is 2.12092  
loss batch it 1722 is 2.17021  
loss batch it 1723 is 1.52981  
loss batch it 1724 is 1.48413  
loss batch it 1725 is 1.56764  
loss batch it 1726 is 1.44074  
loss batch it 1727 is 1.64167  
loss batch it 1728 is 1.80195  
loss batch it 1729 is 2.50404  
loss batch it 1730 is 1.79802  
loss batch it 1731 is 1.68280  
loss batch it 1732 is 1.75029  
loss batch it 1733 is 1.36865  
loss batch it 1734 is 1.70188  
loss batch it 1735 is 1.59013  
loss batch it 1736 is 1.63832  
loss batch it 1737 is 1.60834  
loss batch it 1738 is 1.45956  
loss batch it 1739 is 1.53265  
loss batch it 1740 is 1.79109  
loss batch it 1741 is 1.52490  
loss batch it 1742 is 1.39089  
loss batch it 1743 is 1.48117  
loss batch it 1744 is 1.56026  
loss batch it 1745 is 1.49125  
loss batch it 1746 is 1.73985  
loss batch it 1747 is 1.72814  
loss batch it 1748 is 1.57700  
loss batch it 1749 is 1.45419  
loss batch it 1750 is 1.52417  
loss batch it 1751 is 1.73863  
loss batch it 1752 is 2.04065  
loss batch it 1753 is 1.73193  
loss batch it 1754 is 1.32439  
loss batch it 1755 is 1.52912  
loss batch it 1756 is 1.43174  
loss batch it 1757 is 1.55075  
loss batch it 1758 is 1.48724  
loss batch it 1759 is 1.41147  
loss batch it 1760 is 1.89618  
loss batch it 1761 is 1.48698  
loss batch it 1762 is 1.63772  
loss batch it 1763 is 2.22888  
loss batch it 1764 is 1.42589  
loss batch it 1765 is 1.51032  
loss batch it 1766 is 1.46546  
loss batch it 1767 is 1.51536  
loss batch it 1768 is 1.35015  
loss batch it 1769 is 1.63943  
loss batch it 1770 is 2.38382  
loss batch it 1771 is 1.35463  
loss batch it 1772 is 1.62766  
loss batch it 1773 is 1.66526  
loss batch it 1774 is 1.50528  
loss batch it 1775 is 1.47465  
loss batch it 1776 is 1.62619  
loss batch it 1777 is 1.39447  
loss batch it 1778 is 1.55112  
loss batch it 1779 is 1.35548  
loss batch it 1780 is 2.18158  
loss batch it 1781 is 1.39787  
loss batch it 1782 is 1.66909  
loss batch it 1783 is 1.50911  
loss batch it 1784 is 1.60140  
loss batch it 1785 is 2.42076  
loss batch it 1786 is 1.45309  
loss batch it 1787 is 1.61482  
loss batch it 1788 is 1.41946  
loss batch it 1789 is 1.42262  
loss batch it 1790 is 1.40562  
loss batch it 1791 is 2.84527  
loss batch it 1792 is 1.81546  
loss batch it 1793 is 1.66180  
loss batch it 1794 is 1.57401  
loss batch it 1795 is 1.61650  
loss batch it 1796 is 1.58859  
loss batch it 1797 is 1.47211  
loss batch it 1798 is 1.48339  
loss batch it 1799 is 1.34204  
loss batch it 1800 is 1.43569  
loss batch it 1801 is 1.47778  
loss batch it 1802 is 1.49528  
loss batch it 1803 is 1.70451  
loss batch it 1804 is 1.46689  
loss batch it 1805 is 1.65057  
loss batch it 1806 is 1.66226  
loss batch it 1807 is 1.63932  
loss batch it 1808 is 1.35362  
loss batch it 1809 is 1.36261  
loss batch it 1810 is 1.42115  
loss batch it 1811 is 1.50389  
loss batch it 1812 is 1.57267  
loss batch it 1813 is 1.75737  
loss batch it 1814 is 1.87616  
loss batch it 1815 is 1.53724  
loss batch it 1816 is 1.94162  
loss batch it 1817 is 1.41085  
loss batch it 1818 is 1.64060  
loss batch it 1819 is 1.65708  
loss batch it 1820 is 1.26201  
loss batch it 1821 is 1.66271  
loss batch it 1822 is 1.60439  
loss batch it 1823 is 1.41619  
loss batch it 1824 is 1.59932  
loss batch it 1825 is 1.33103  
loss batch it 1826 is 1.32619  
loss batch it 1827 is 1.35583  
loss batch it 1828 is 1.41425  
loss batch it 1829 is 2.25715  
loss batch it 1830 is 1.47059  
loss batch it 1831 is 1.42313  
loss batch it 1832 is 1.38838  
loss batch it 1833 is 1.40245  
loss batch it 1834 is 2.26136  
loss batch it 1835 is 1.48070  
loss batch it 1836 is 1.45772  
loss batch it 1837 is 1.75655  
loss batch it 1838 is 1.42094  
loss batch it 1839 is 1.62293  
loss batch it 1840 is 1.79956  
loss batch it 1841 is 1.84242  
loss batch it 1842 is 1.91615  
loss batch it 1843 is 1.34886  
loss batch it 1844 is 1.37995  
loss batch it 1845 is 1.41572  
loss batch it 1846 is 1.25645  
loss batch it 1847 is 1.42063  
loss batch it 1848 is 1.55749  
loss batch it 1849 is 1.35690  
loss batch it 1850 is 1.29443  
loss batch it 1851 is 1.44016  
loss batch it 1852 is 1.68094  
loss batch it 1853 is 1.38150  
loss batch it 1854 is 1.59826  
loss batch it 1855 is 1.56211  
loss batch it 1856 is 1.65391  
loss batch it 1857 is 1.73423  
loss batch it 1858 is 2.21123  
loss batch it 1859 is 1.74876  
loss batch it 1860 is 1.92992  
loss batch it 1861 is 1.78891  
loss batch it 1862 is 1.54609  
loss batch it 1863 is 1.43621  
loss batch it 1864 is 1.55361  
loss batch it 1865 is 1.34184  
loss batch it 1866 is 1.29568  
loss batch it 1867 is 1.38994  
loss batch it 1868 is 1.31872  
loss batch it 1869 is 1.33652  
loss batch it 1870 is 1.46712  
loss batch it 1871 is 1.85310  
loss batch it 1872 is 1.39938  
loss batch it 1873 is 1.28992  
loss batch it 1874 is 1.34100  
loss batch it 1875 is 1.36850  
loss batch it 1876 is 1.76346  
loss batch it 1877 is 1.42987  
loss batch it 1878 is 1.58253  
loss batch it 1879 is 1.34132  
loss batch it 1880 is 1.61782  
loss batch it 1881 is 1.42478  
loss batch it 1882 is 1.41667  
loss batch it 1883 is 1.41770  
loss batch it 1884 is 1.63142  
loss batch it 1885 is 1.43783  
loss batch it 1886 is 1.23232  
loss batch it 1887 is 1.41338  
loss batch it 1888 is 1.29830  
loss batch it 1889 is 1.26828  
loss batch it 1890 is 1.59553  
loss batch it 1891 is 1.54259  
loss batch it 1892 is 1.36974  
loss batch it 1893 is 1.29947  
loss batch it 1894 is 1.52987  
loss batch it 1895 is 1.17503  
loss batch it 1896 is 1.27492  
loss batch it 1897 is 1.25790  
loss batch it 1898 is 1.58902  
loss batch it 1899 is 1.51937  
loss batch it 1900 is 1.46786  
loss batch it 1901 is 1.30210  
loss batch it 1902 is 1.47024  
loss batch it 1903 is 1.27147  
loss batch it 1904 is 1.20222  
loss batch it 1905 is 1.41103  
loss batch it 1906 is 1.34545  
loss batch it 1907 is 1.46174  
loss batch it 1908 is 1.51491  
loss batch it 1909 is 1.72864  
loss batch it 1910 is 1.54749  
loss batch it 1911 is 1.23444  
loss batch it 1912 is 1.35253  
loss batch it 1913 is 2.04841  
loss batch it 1914 is 1.71690  
loss batch it 1915 is 2.43614  
loss batch it 1916 is 1.34286  
loss batch it 1917 is 1.35811  
loss batch it 1918 is 1.45304  
loss batch it 1919 is 1.36117  
loss batch it 1920 is 1.77876  
loss batch it 1921 is 1.50961  
loss batch it 1922 is 1.25076  
loss batch it 1923 is 1.29404  
loss batch it 1924 is 1.13700  
loss batch it 1925 is 1.30320  
loss batch it 1926 is 1.25618  
loss batch it 1927 is 1.27713  
loss batch it 1928 is 1.33089  
loss batch it 1929 is 1.24258  
loss batch it 1930 is 1.28059  
loss batch it 1931 is 1.32644  
loss batch it 1932 is 2.48116  
loss batch it 1933 is 1.51262  
loss batch it 1934 is 1.84246  
loss batch it 1935 is 1.58539  
loss batch it 1936 is 1.41134  
loss batch it 1937 is 1.37252  
loss batch it 1938 is 1.18505  
loss batch it 1939 is 1.34868  
loss batch it 1940 is 1.37731  
loss batch it 1941 is 1.31790  
loss batch it 1942 is 1.17648  
loss batch it 1943 is 1.25909  
loss batch it 1944 is 1.32648  
loss batch it 1945 is 1.46116  
loss batch it 1946 is 1.92648  
loss batch it 1947 is 1.44351  
loss batch it 1948 is 1.67316  
loss batch it 1949 is 1.11878  
loss batch it 1950 is 1.40551  
loss batch it 1951 is 1.24035  
loss batch it 1952 is 1.16781  
loss batch it 1953 is 1.33558  
loss batch it 1954 is 1.52780  
loss batch it 1955 is 1.48096  
loss batch it 1956 is 1.39918  
loss batch it 1957 is 1.30444  
loss batch it 1958 is 1.48760  
loss batch it 1959 is 1.31950  
loss batch it 1960 is 1.17360  
loss batch it 1961 is 1.52185  
loss batch it 1962 is 1.41182  
loss batch it 1963 is 1.72879  
loss batch it 1964 is 1.24617  
loss batch it 1965 is 1.32797  
loss batch it 1966 is 1.34949  
loss batch it 1967 is 1.32840  
loss batch it 1968 is 1.28160  
loss batch it 1969 is 1.39006  
loss batch it 1970 is 2.64739  
loss batch it 1971 is 1.35982  
loss batch it 1972 is 1.70734  
loss batch it 1973 is 1.24917  
loss batch it 1974 is 1.51092  
loss batch it 1975 is 1.74435  
loss batch it 1976 is 1.30704  
loss batch it 1977 is 1.21486  
loss batch it 1978 is 1.74000  
loss batch it 1979 is 1.25241  
loss batch it 1980 is 1.39019  
loss batch it 1981 is 1.28646  
loss batch it 1982 is 1.56360  
loss batch it 1983 is 1.33765  
loss batch it 1984 is 1.57534  
loss batch it 1985 is 1.47480  
loss batch it 1986 is 1.25609  
loss batch it 1987 is 1.40043  
loss batch it 1988 is 1.31679  
loss batch it 1989 is 1.32530  
loss batch it 1990 is 1.44764  
loss batch it 1991 is 1.43922  
loss batch it 1992 is 1.53741  
loss batch it 1993 is 1.34885  
loss batch it 1994 is 1.53514  
loss batch it 1995 is 1.22278  
loss batch it 1996 is 1.61219  
loss batch it 1997 is 1.21602  
loss batch it 1998 is 1.40496  
loss batch it 1999 is 1.19879  
loss batch it 2000 is 1.26168  
loss batch it 2001 is 1.36405  
loss batch it 2002 is 1.39036  
loss batch it 2003 is 1.71455  
loss batch it 2004 is 1.29673  
loss batch it 2005 is 1.32851  
loss batch it 2006 is 1.29528  
loss batch it 2007 is 1.27296  
loss batch it 2008 is 1.14086  
loss batch it 2009 is 1.68371  
loss batch it 2010 is 1.35926  
loss batch it 2011 is 1.32705  
loss batch it 2012 is 2.10239  
loss batch it 2013 is 1.21641  
loss batch it 2014 is 1.84934  
loss batch it 2015 is 1.18872  
loss batch it 2016 is 1.30874  
loss batch it 2017 is 1.61411  
loss batch it 2018 is 1.41778  
loss batch it 2019 is 1.19778  
loss batch it 2020 is 1.26427  
loss batch it 2021 is 1.17848  
loss batch it 2022 is 1.28194  
loss batch it 2023 is 1.14770  
loss batch it 2024 is 1.29003  
loss batch it 2025 is 1.45602  
loss batch it 2026 is 1.34481  
loss batch it 2027 is 1.58544  
loss batch it 2028 is 1.13145  
loss batch it 2029 is 1.28314  
loss batch it 2030 is 1.11932  
loss batch it 2031 is 1.21808  
loss batch it 2032 is 1.33096  
loss batch it 2033 is 1.80919  
loss batch it 2034 is 1.58414  
loss batch it 2035 is 1.82975  
loss batch it 2036 is 1.70766  
loss batch it 2037 is 1.35077  
loss batch it 2038 is 1.36972  
loss batch it 2039 is 2.02064  
loss batch it 2040 is 1.15410  
loss batch it 2041 is 1.29461  
loss batch it 2042 is 3.87501  
loss batch it 2043 is 1.51570  
loss batch it 2044 is 1.33687  
loss batch it 2045 is 1.18370  
loss batch it 2046 is 1.23982  
loss batch it 2047 is 1.20295  
loss batch it 2048 is 1.20809  
loss batch it 2049 is 1.25681  
loss batch it 2050 is 1.15684  
loss batch it 2051 is 1.48711  
loss batch it 2052 is 1.26692  
loss batch it 2053 is 1.17424  
loss batch it 2054 is 1.14806  
loss batch it 2055 is 1.57218  
loss batch it 2056 is 1.41368  
loss batch it 2057 is 1.10984  
loss batch it 2058 is 1.19731  
loss batch it 2059 is 1.22259  
loss batch it 2060 is 1.42333  
loss batch it 2061 is 1.21687  
loss batch it 2062 is 1.38923  
loss batch it 2063 is 1.31198  
loss batch it 2064 is 1.10026  
loss batch it 2065 is 1.35179  
loss batch it 2066 is 1.28923  
loss batch it 2067 is 1.52254  
loss batch it 2068 is 1.40300  
loss batch it 2069 is 1.37672  
loss batch it 2070 is 1.53050  
loss batch it 2071 is 1.53451  
loss batch it 2072 is 1.14572  
loss batch it 2073 is 1.57535  
loss batch it 2074 is 1.30070  
loss batch it 2075 is 1.40256  
loss batch it 2076 is 1.31265  
loss batch it 2077 is 1.15404  
loss batch it 2078 is 1.23081  
loss batch it 2079 is 1.19787  
loss batch it 2080 is 1.19265  
loss batch it 2081 is 1.58733  
loss batch it 2082 is 1.36088  
loss batch it 2083 is 1.36042  
loss batch it 2084 is 1.51264  
loss batch it 2085 is 1.32739  
loss batch it 2086 is 1.21480  
loss batch it 2087 is 1.22538  
loss batch it 2088 is 1.22402  
loss batch it 2089 is 1.10853  
loss batch it 2090 is 1.44216  
loss batch it 2091 is 1.16366  
loss batch it 2092 is 1.27558  
loss batch it 2093 is 1.21484  
loss batch it 2094 is 1.18390  
loss batch it 2095 is 1.32494  
loss batch it 2096 is 1.52855  
loss batch it 2097 is 1.40249  
loss batch it 2098 is 1.47218  
loss batch it 2099 is 1.45921  
loss batch it 2100 is 1.67583  
loss batch it 2101 is 1.16319  
loss batch it 2102 is 1.43027  
loss batch it 2103 is 1.31191  
loss batch it 2104 is 1.21200  
loss batch it 2105 is 1.31673  
loss batch it 2106 is 1.12853  
loss batch it 2107 is 1.40008  
loss batch it 2108 is 1.71878  
loss batch it 2109 is 1.25443  
loss batch it 2110 is 1.30290  
loss batch it 2111 is 1.38406  
loss batch it 2112 is 1.76692  
loss batch it 2113 is 1.70406  
loss batch it 2114 is 1.72322  
loss batch it 2115 is 1.18375  
loss batch it 2116 is 1.18809  
loss batch it 2117 is 1.31481  
loss batch it 2118 is 1.29520  
loss batch it 2119 is 1.49591  
loss batch it 2120 is 1.18402  
loss batch it 2121 is 1.48020  
loss batch it 2122 is 1.53742  
loss batch it 2123 is 2.36711  
loss batch it 2124 is 1.21651  
loss batch it 2125 is 1.36291  
loss batch it 2126 is 1.17349  
loss batch it 2127 is 1.46110  
loss batch it 2128 is 1.30563  
loss batch it 2129 is 1.29803  
loss batch it 2130 is 1.24336  
loss batch it 2131 is 1.21559  
loss batch it 2132 is 1.23412  
loss batch it 2133 is 1.82702  
loss batch it 2134 is 1.16684  
loss batch it 2135 is 1.12778  
loss batch it 2136 is 1.31107  
loss batch it 2137 is 1.32951  
loss batch it 2138 is 1.24552  
loss batch it 2139 is 1.30027  
loss batch it 2140 is 1.60445  
loss batch it 2141 is 1.28165  
loss batch it 2142 is 1.13652  
loss batch it 2143 is 1.18213  
loss batch it 2144 is 1.18194  
loss batch it 2145 is 1.28875  
loss batch it 2146 is 1.16566  
loss batch it 2147 is 1.13995  
loss batch it 2148 is 1.73031  
loss batch it 2149 is 1.46996  
loss batch it 2150 is 1.20516  
loss batch it 2151 is 1.22305  
loss batch it 2152 is 1.47156  
loss batch it 2153 is 1.42533  
loss batch it 2154 is 1.20197  
loss batch it 2155 is 1.24026  
loss batch it 2156 is 1.71952  
loss batch it 2157 is 1.30856  
loss batch it 2158 is 1.44773  
loss batch it 2159 is 1.80202  
loss batch it 2160 is 1.34716  
loss batch it 2161 is 1.73068  
loss batch it 2162 is 1.38869  
loss batch it 2163 is 1.09949  
loss batch it 2164 is 1.10948  
loss batch it 2165 is 1.10894  
loss batch it 2166 is 1.18949  
loss batch it 2167 is 1.17494  
loss batch it 2168 is 1.19528  
loss batch it 2169 is 1.14037  
loss batch it 2170 is 1.11649  
loss batch it 2171 is 1.19342  
loss batch it 2172 is 1.18250  
loss batch it 2173 is 1.12713  
loss batch it 2174 is 1.12822  
loss batch it 2175 is 1.12344  
loss batch it 2176 is 1.01982  
loss batch it 2177 is 1.01788  
loss batch it 2178 is 1.04526  
loss batch it 2179 is 1.15840  
loss batch it 2180 is 1.06956  
loss batch it 2181 is 1.33632  
loss batch it 2182 is 1.85009  
loss batch it 2183 is 1.35460  
loss batch it 2184 is 1.29978  
loss batch it 2185 is 1.12793  
loss batch it 2186 is 1.25643  
loss batch it 2187 is 0.99539  
loss batch it 2188 is 3.14988  
loss batch it 2189 is 1.17440  
loss batch it 2190 is 1.26719  
loss batch it 2191 is 1.47231  
loss batch it 2192 is 1.03060  
loss batch it 2193 is 1.38887  
loss batch it 2194 is 1.20770  
loss batch it 2195 is 1.12096  
loss batch it 2196 is 1.15565  
loss batch it 2197 is 1.18689  
loss batch it 2198 is 1.13467  
loss batch it 2199 is 1.16504  
loss batch it 2200 is 1.09526  
loss batch it 2201 is 1.14993  
loss batch it 2202 is 1.15337  
loss batch it 2203 is 1.55390  
loss batch it 2204 is 1.19879  
loss batch it 2205 is 1.37937  
loss batch it 2206 is 1.51210  
loss batch it 2207 is 1.00916  
loss batch it 2208 is 1.41611  
loss batch it 2209 is 1.16669  
loss batch it 2210 is 1.13185  
loss batch it 2211 is 0.99334  
loss batch it 2212 is 1.34904  
loss batch it 2213 is 1.06236  
loss batch it 2214 is 1.14508  
loss batch it 2215 is 1.06185  
loss batch it 2216 is 1.04064  
loss batch it 2217 is 1.29892  
loss batch it 2218 is 1.20701  
loss batch it 2219 is 1.53346  
loss batch it 2220 is 1.06381  
loss batch it 2221 is 1.09471  
loss batch it 2222 is 1.16360  
loss batch it 2223 is 1.40102  
loss batch it 2224 is 1.19476  
loss batch it 2225 is 1.23416  
loss batch it 2226 is 1.21178  
loss batch it 2227 is 1.47083  
loss batch it 2228 is 1.08652  
loss batch it 2229 is 1.48756  
loss batch it 2230 is 1.33468  
loss batch it 2231 is 1.43134  
loss batch it 2232 is 1.46077  
loss batch it 2233 is 1.43215  
loss batch it 2234 is 1.22325  
loss batch it 2235 is 1.05604  
loss batch it 2236 is 1.16967  
loss batch it 2237 is 1.31981  
loss batch it 2238 is 1.23300  
loss batch it 2239 is 1.17624  
loss batch it 2240 is 1.11831  
loss batch it 2241 is 1.26134  
loss batch it 2242 is 1.26142  
loss batch it 2243 is 1.35294  
loss batch it 2244 is 1.28039  
loss batch it 2245 is 1.86094  
loss batch it 2246 is 1.24689  
loss batch it 2247 is 0.96215  
loss batch it 2248 is 1.15925  
loss batch it 2249 is 1.20204  
loss batch it 2250 is 1.40003  
loss batch it 2251 is 1.09133  
loss batch it 2252 is 1.03959  
loss batch it 2253 is 1.27166  
loss batch it 2254 is 1.20036  
loss batch it 2255 is 1.17209  
loss batch it 2256 is 1.45369  
loss batch it 2257 is 1.20857  
loss batch it 2258 is 1.31207  
loss batch it 2259 is 1.18736  
loss batch it 2260 is 0.93256  
loss batch it 2261 is 1.12849  
loss batch it 2262 is 1.13434  
loss batch it 2263 is 1.10073  
loss batch it 2264 is 2.07295  
loss batch it 2265 is 1.15983  
loss batch it 2266 is 1.24045  
loss batch it 2267 is 1.19274  
loss batch it 2268 is 1.16631  
loss batch it 2269 is 1.48362  
loss batch it 2270 is 1.11735  
loss batch it 2271 is 1.57476  
loss batch it 2272 is 1.16391  
loss batch it 2273 is 1.35718  
loss batch it 2274 is 1.13106  
loss batch it 2275 is 1.07082  
loss batch it 2276 is 1.32264  
loss batch it 2277 is 1.23153  
loss batch it 2278 is 1.01391  
loss batch it 2279 is 1.12473  
loss batch it 2280 is 1.13601  
loss batch it 2281 is 1.12465  
loss batch it 2282 is 1.34323  
loss batch it 2283 is 1.29293  
loss batch it 2284 is 1.04282  
loss batch it 2285 is 1.14519  
loss batch it 2286 is 1.02697  
loss batch it 2287 is 1.15659  
loss batch it 2288 is 1.18683  
loss batch it 2289 is 1.36644  
loss batch it 2290 is 1.14516  
loss batch it 2291 is 0.96480  
loss batch it 2292 is 1.05367  
loss batch it 2293 is 1.48272  
loss batch it 2294 is 1.52430  
loss batch it 2295 is 1.10822  
loss batch it 2296 is 1.30804  
loss batch it 2297 is 1.53468  
loss batch it 2298 is 0.94246  
loss batch it 2299 is 1.00169  
loss batch it 2300 is 1.07821  
loss batch it 2301 is 1.25836  
loss batch it 2302 is 0.97890  
loss batch it 2303 is 1.25685  
loss batch it 2304 is 1.08101  
loss batch it 2305 is 1.14521  
loss batch it 2306 is 1.32000  
loss batch it 2307 is 1.03223  
loss batch it 2308 is 1.06621  
loss batch it 2309 is 1.25441  
loss batch it 2310 is 1.05240  
loss batch it 2311 is 1.11075  
loss batch it 2312 is 0.98829  
loss batch it 2313 is 0.97723  
loss batch it 2314 is 1.06725  
loss batch it 2315 is 1.08843  
loss batch it 2316 is 1.02220  
loss batch it 2317 is 1.33564  
loss batch it 2318 is 1.24386  
loss batch it 2319 is 1.02371  
loss batch it 2320 is 1.05884  
loss batch it 2321 is 1.08979  
loss batch it 2322 is 1.70000  
loss batch it 2323 is 2.35256  
loss batch it 2324 is 1.73373  
loss batch it 2325 is 1.39709  
loss batch it 2326 is 1.08343  
loss batch it 2327 is 1.11973  
loss batch it 2328 is 1.47161  
loss batch it 2329 is 1.17929  
loss batch it 2330 is 1.07545  
loss batch it 2331 is 1.12947  
loss batch it 2332 is 1.22933  
loss batch it 2333 is 1.31497  
loss batch it 2334 is 1.06027  
loss batch it 2335 is 1.10127  
loss batch it 2336 is 1.18107  
loss batch it 2337 is 1.19294  
loss batch it 2338 is 1.11425  
loss batch it 2339 is 0.99469  
loss batch it 2340 is 1.13258  
loss batch it 2341 is 1.35499  
loss batch it 2342 is 1.14316  
loss batch it 2343 is 0.94897  
loss batch it 2344 is 1.07302  
loss batch it 2345 is 1.12936  
loss batch it 2346 is 1.00868  
loss batch it 2347 is 1.17059  
loss batch it 2348 is 1.48693  
loss batch it 2349 is 1.18102  
loss batch it 2350 is 2.35166  
loss batch it 2351 is 1.55091  
loss batch it 2352 is 1.09533  
loss batch it 2353 is 1.03820  
loss batch it 2354 is 2.72505  
loss batch it 2355 is 1.09679  
loss batch it 2356 is 1.61910  
loss batch it 2357 is 1.10929  
loss batch it 2358 is 1.07276  
loss batch it 2359 is 1.03915  
loss batch it 2360 is 1.07828  
loss batch it 2361 is 1.04596  
loss batch it 2362 is 1.05089  
loss batch it 2363 is 1.15359  
loss batch it 2364 is 1.02555  
loss batch it 2365 is 0.95578  
loss batch it 2366 is 1.04891  
loss batch it 2367 is 1.17679  
loss batch it 2368 is 1.13146  
loss batch it 2369 is 1.02017  
loss batch it 2370 is 1.03489  
loss batch it 2371 is 1.47705  
loss batch it 2372 is 1.09579  
loss batch it 2373 is 1.14682  
loss batch it 2374 is 1.14299  
loss batch it 2375 is 1.65539  
loss batch it 2376 is 1.08074  
loss batch it 2377 is 1.24206  
loss batch it 2378 is 0.97661  
loss batch it 2379 is 1.05342  
loss batch it 2380 is 1.10958  
loss batch it 2381 is 1.10765  
loss batch it 2382 is 1.09899  
loss batch it 2383 is 1.23262  
loss batch it 2384 is 1.04202  
loss batch it 2385 is 1.08578  
loss batch it 2386 is 1.13658  
loss batch it 2387 is 1.34610  
loss batch it 2388 is 1.36248  
loss batch it 2389 is 1.00477  
loss batch it 2390 is 1.86971  
loss batch it 2391 is 1.32409  
loss batch it 2392 is 1.97505  
loss batch it 2393 is 1.84755  
loss batch it 2394 is 1.31609  
loss batch it 2395 is 0.98049  
loss batch it 2396 is 1.09490  
loss batch it 2397 is 1.10214  
loss batch it 2398 is 1.07784  
loss batch it 2399 is 1.09324  
loss batch it 2400 is 1.03440  
loss batch it 2401 is 0.95862  
loss batch it 2402 is 0.99285  
loss batch it 2403 is 0.92557  
loss batch it 2404 is 0.95877  
loss batch it 2405 is 1.03158  
loss batch it 2406 is 1.27449  
loss batch it 2407 is 1.23030  
loss batch it 2408 is 1.04665  
loss batch it 2409 is 1.04757  
loss batch it 2410 is 1.05219  
loss batch it 2411 is 1.09733  
loss batch it 2412 is 0.96500  
loss batch it 2413 is 1.00118  
loss batch it 2414 is 0.95949  
loss batch it 2415 is 1.15406  
loss batch it 2416 is 1.00399  
loss batch it 2417 is 2.78227  
loss batch it 2418 is 1.21650  
loss batch it 2419 is 1.43811  
loss batch it 2420 is 1.23876  
loss batch it 2421 is 1.07910  
loss batch it 2422 is 0.99759  
loss batch it 2423 is 1.09631  
loss batch it 2424 is 1.02244  
loss batch it 2425 is 1.04222  
loss batch it 2426 is 0.99381  
loss batch it 2427 is 0.97747  
loss batch it 2428 is 1.05558  
loss batch it 2429 is 0.99725  
loss batch it 2430 is 0.91088  
loss batch it 2431 is 0.91426  
loss batch it 2432 is 0.96084  
loss batch it 2433 is 1.18391  
loss batch it 2434 is 1.02482  
loss batch it 2435 is 1.11126  
loss batch it 2436 is 1.10360  
loss batch it 2437 is 0.97284  
loss batch it 2438 is 1.00508  
loss batch it 2439 is 0.93383  
loss batch it 2440 is 0.96331  
loss batch it 2441 is 0.99639  
loss batch it 2442 is 1.04333  
loss batch it 2443 is 1.01465  
loss batch it 2444 is 0.92991  
loss batch it 2445 is 0.92226  
loss batch it 2446 is 0.99799  
loss batch it 2447 is 0.97238  
loss batch it 2448 is 1.08633  
loss batch it 2449 is 0.99222  
loss batch it 2450 is 1.15275  
loss batch it 2451 is 1.03937  
loss batch it 2452 is 2.47296  
loss batch it 2453 is 0.97991  
loss batch it 2454 is 1.04740  
loss batch it 2455 is 1.13704  
loss batch it 2456 is 0.99178  
loss batch it 2457 is 0.95583  
loss batch it 2458 is 1.19068  
loss batch it 2459 is 1.07977  
loss batch it 2460 is 1.40661  
loss batch it 2461 is 1.19597  
loss batch it 2462 is 0.97913  
loss batch it 2463 is 1.03415  
loss batch it 2464 is 1.17017  
loss batch it 2465 is 1.21894  
loss batch it 2466 is 1.10690  
loss batch it 2467 is 1.06160  
loss batch it 2468 is 0.93769  
loss batch it 2469 is 1.83763  
loss batch it 2470 is 1.00310  
loss batch it 2471 is 1.11932  
loss batch it 2472 is 1.18779  
loss batch it 2473 is 1.02135  
loss batch it 2474 is 1.64637  
loss batch it 2475 is 1.03513  
loss batch it 2476 is 1.22729  
loss batch it 2477 is 1.11133  
loss batch it 2478 is 1.14274  
loss batch it 2479 is 0.93769  
loss batch it 2480 is 1.07610  
loss batch it 2481 is 1.40852  
loss batch it 2482 is 1.05182  
loss batch it 2483 is 0.91912  
loss batch it 2484 is 1.20052  
loss batch it 2485 is 0.96288  
loss batch it 2486 is 1.46173  
loss batch it 2487 is 0.90434  
loss batch it 2488 is 1.27809  
loss batch it 2489 is 1.18980  
loss batch it 2490 is 1.06585  
loss batch it 2491 is 0.91778  
loss batch it 2492 is 0.95407  
loss batch it 2493 is 1.00080  
loss batch it 2494 is 1.06437  
loss batch it 2495 is 0.95571  
loss batch it 2496 is 0.93737  
loss batch it 2497 is 1.10581  
loss batch it 2498 is 0.95065  
loss batch it 2499 is 0.99997  
loss batch it 2500 is 1.00299  
loss batch it 2501 is 1.01734  
loss batch it 2502 is 0.88903  
loss batch it 2503 is 1.06360  
loss batch it 2504 is 1.09521  
loss batch it 2505 is 1.01413  
loss batch it 2506 is 1.14794  
loss batch it 2507 is 0.98511  
loss batch it 2508 is 1.08707  
loss batch it 2509 is 2.20427  
loss batch it 2510 is 1.45767  
loss batch it 2511 is 0.98373  
loss batch it 2512 is 1.00455  
loss batch it 2513 is 0.96823  
loss batch it 2514 is 0.92440  
loss batch it 2515 is 0.89048  
loss batch it 2516 is 0.99081  
loss batch it 2517 is 0.99179  
loss batch it 2518 is 0.92723  
loss batch it 2519 is 1.05132  
loss batch it 2520 is 1.27652  
loss batch it 2521 is 1.08475  
loss batch it 2522 is 1.17453  
loss batch it 2523 is 0.91600  
loss batch it 2524 is 1.50280  
loss batch it 2525 is 1.16091  
loss batch it 2526 is 1.00854  
loss batch it 2527 is 1.06806  
loss batch it 2528 is 1.30950  
loss batch it 2529 is 1.20054  
loss batch it 2530 is 1.32902  
loss batch it 2531 is 1.28991  
loss batch it 2532 is 1.07349  
loss batch it 2533 is 0.95853  
loss batch it 2534 is 1.02695  
loss batch it 2535 is 1.02861  
loss batch it 2536 is 0.92547  
loss batch it 2537 is 0.96669  
loss batch it 2538 is 1.00166  
loss batch it 2539 is 1.28630  
loss batch it 2540 is 1.56038  
loss batch it 2541 is 0.94311  
loss batch it 2542 is 0.95811  
loss batch it 2543 is 1.28995  
loss batch it 2544 is 0.93422  
loss batch it 2545 is 0.97983  
loss batch it 2546 is 0.95684  
loss batch it 2547 is 1.10859  
loss batch it 2548 is 0.96119  
loss batch it 2549 is 1.11523  
loss batch it 2550 is 1.38426  
loss batch it 2551 is 0.97952  
loss batch it 2552 is 1.15462  
loss batch it 2553 is 1.09205  
loss batch it 2554 is 1.03160  
loss batch it 2555 is 0.90323  
loss batch it 2556 is 1.06745  
loss batch it 2557 is 0.99476  
loss batch it 2558 is 1.05017  
loss batch it 2559 is 1.18707  
loss batch it 2560 is 1.05110  
loss batch it 2561 is 1.09553  
loss batch it 2562 is 0.93285  
loss batch it 2563 is 0.96619  
loss batch it 2564 is 0.84324  
loss batch it 2565 is 0.95512  
loss batch it 2566 is 1.00889  
loss batch it 2567 is 1.05409  
loss batch it 2568 is 1.06295  
loss batch it 2569 is 0.93194  
loss batch it 2570 is 1.01484  
loss batch it 2571 is 0.95172  
loss batch it 2572 is 1.23422  
loss batch it 2573 is 0.92500  
loss batch it 2574 is 0.93391  
loss batch it 2575 is 1.02100  
loss batch it 2576 is 0.96639  
loss batch it 2577 is 1.28178  
loss batch it 2578 is 1.04391  
loss batch it 2579 is 1.97514  
loss batch it 2580 is 1.18141  
loss batch it 2581 is 1.32377  
loss batch it 2582 is 0.93760  
loss batch it 2583 is 0.89629  
loss batch it 2584 is 0.96532  
loss batch it 2585 is 0.98551  
loss batch it 2586 is 0.97277  
loss batch it 2587 is 1.00028  
loss batch it 2588 is 0.97673  
loss batch it 2589 is 0.97756  
loss batch it 2590 is 0.90050  
loss batch it 2591 is 0.96089  
loss batch it 2592 is 0.91374  
loss batch it 2593 is 0.92712  
loss batch it 2594 is 0.95036  
loss batch it 2595 is 1.00372  
loss batch it 2596 is 1.11813  
loss batch it 2597 is 0.94596  
loss batch it 2598 is 0.95795  
loss batch it 2599 is 0.90533  
loss batch it 2600 is 1.00133  
loss batch it 2601 is 0.84603  
loss batch it 2602 is 0.97277  
loss batch it 2603 is 0.98755  
loss batch it 2604 is 0.90130  
loss batch it 2605 is 0.96995  
loss batch it 2606 is 0.95242  
loss batch it 2607 is 1.03050  
loss batch it 2608 is 1.16998  
loss batch it 2609 is 0.98517  
loss batch it 2610 is 0.91356  
loss batch it 2611 is 1.03640  
loss batch it 2612 is 0.85732  
loss batch it 2613 is 0.96996  
loss batch it 2614 is 0.90873  
loss batch it 2615 is 0.84202  
loss batch it 2616 is 1.08939  
loss batch it 2617 is 1.57034  
loss batch it 2618 is 1.15177  
loss batch it 2619 is 1.16926  
loss batch it 2620 is 0.92847  
loss batch it 2621 is 0.93669  
loss batch it 2622 is 0.96250  
loss batch it 2623 is 0.90088  
loss batch it 2624 is 1.22995  
loss batch it 2625 is 0.87802  
loss batch it 2626 is 1.04671  
loss batch it 2627 is 1.07017  
loss batch it 2628 is 1.08520  
loss batch it 2629 is 0.89934  
loss batch it 2630 is 0.97208  
loss batch it 2631 is 0.95105  
loss batch it 2632 is 1.05286  
loss batch it 2633 is 1.12191  
loss batch it 2634 is 1.09697  
loss batch it 2635 is 1.00076  
loss batch it 2636 is 1.07597  
loss batch it 2637 is 1.09118  
loss batch it 2638 is 1.02570  
loss batch it 2639 is 0.91301  
loss batch it 2640 is 1.03447  
loss batch it 2641 is 0.97576  
loss batch it 2642 is 0.92493  
loss batch it 2643 is 1.09993  
loss batch it 2644 is 0.99620  
loss batch it 2645 is 1.25472  
loss batch it 2646 is 1.07452  
loss batch it 2647 is 1.44334  
loss batch it 2648 is 0.83252  
loss batch it 2649 is 1.25629  
loss batch it 2650 is 0.86301  
loss batch it 2651 is 0.99914  
loss batch it 2652 is 0.86553  
loss batch it 2653 is 1.20445  
loss batch it 2654 is 0.95333  
loss batch it 2655 is 0.98647  
loss batch it 2656 is 0.92994  
loss batch it 2657 is 1.00936  
loss batch it 2658 is 1.17917  
loss batch it 2659 is 1.04264  
loss batch it 2660 is 0.89816  
loss batch it 2661 is 1.20491  
loss batch it 2662 is 1.09792  
loss batch it 2663 is 0.88089  
loss batch it 2664 is 1.16645  
loss batch it 2665 is 0.94403  
loss batch it 2666 is 0.84768  
loss batch it 2667 is 0.94439  
loss batch it 2668 is 0.96697  
loss batch it 2669 is 0.98578  
loss batch it 2670 is 1.02925  
loss batch it 2671 is 0.79911  
loss batch it 2672 is 1.35447  
loss batch it 2673 is 1.57654  
loss batch it 2674 is 1.01236  
loss batch it 2675 is 1.03588  
loss batch it 2676 is 1.34310  
loss batch it 2677 is 1.47861  
loss batch it 2678 is 1.26827  
loss batch it 2679 is 0.99740  
loss batch it 2680 is 1.20224  
loss batch it 2681 is 0.93401  
loss batch it 2682 is 0.89747  
loss batch it 2683 is 0.88046  
loss batch it 2684 is 0.90618  
loss batch it 2685 is 0.89773  
loss batch it 2686 is 0.94040  
loss batch it 2687 is 0.88008  
loss batch it 2688 is 1.08324  
loss batch it 2689 is 0.95131  
loss batch it 2690 is 0.91835  
loss batch it 2691 is 0.84128  
loss batch it 2692 is 0.94328  
loss batch it 2693 is 0.93073  
loss batch it 2694 is 0.90194  
loss batch it 2695 is 0.78227  
loss batch it 2696 is 0.83193  
loss batch it 2697 is 0.83533  
loss batch it 2698 is 0.86018  
loss batch it 2699 is 0.89697  
loss batch it 2700 is 0.91632  
loss batch it 2701 is 0.82733  
loss batch it 2702 is 1.05319  
loss batch it 2703 is 0.94587  
loss batch it 2704 is 0.91813  
loss batch it 2705 is 0.97028  
loss batch it 2706 is 1.14835  
loss batch it 2707 is 1.09099  
loss batch it 2708 is 0.90883  
loss batch it 2709 is 0.94388  
loss batch it 2710 is 0.97023  
loss batch it 2711 is 0.93770  
loss batch it 2712 is 1.14737  
loss batch it 2713 is 0.84131  
loss batch it 2714 is 0.97937  
loss batch it 2715 is 1.05886  
loss batch it 2716 is 0.88408  
loss batch it 2717 is 0.99086  
loss batch it 2718 is 0.91253  
loss batch it 2719 is 1.00787  
loss batch it 2720 is 1.15973  
loss batch it 2721 is 0.90419  
loss batch it 2722 is 0.92780  
loss batch it 2723 is 0.95736  
loss batch it 2724 is 0.84016  
loss batch it 2725 is 0.88628  
loss batch it 2726 is 1.60571  
loss batch it 2727 is 1.52077  
loss batch it 2728 is 0.88156  
loss batch it 2729 is 2.17291  
loss batch it 2730 is 1.28640  
loss batch it 2731 is 1.00885  
loss batch it 2732 is 0.96685  
loss batch it 2733 is 1.04036  
loss batch it 2734 is 0.93016  
loss batch it 2735 is 1.02358  
loss batch it 2736 is 0.88995  
loss batch it 2737 is 1.02527  
loss batch it 2738 is 1.02483  
loss batch it 2739 is 0.89883  
loss batch it 2740 is 1.37766  
loss batch it 2741 is 1.07427  
loss batch it 2742 is 0.90152  
loss batch it 2743 is 0.93368  
loss batch it 2744 is 0.85208  
loss batch it 2745 is 0.92705  
loss batch it 2746 is 1.18499  
loss batch it 2747 is 1.05275  
loss batch it 2748 is 0.80767  
loss batch it 2749 is 0.86948  
loss batch it 2750 is 1.23448  
loss batch it 2751 is 0.84075  
loss batch it 2752 is 1.28701  
loss batch it 2753 is 1.04855  
loss batch it 2754 is 0.86087  
loss batch it 2755 is 0.88329  
loss batch it 2756 is 0.89245  
loss batch it 2757 is 1.00898  
loss batch it 2758 is 0.97206  
loss batch it 2759 is 0.99798  
loss batch it 2760 is 0.97873  
loss batch it 2761 is 0.82102  
loss batch it 2762 is 0.95775  
loss batch it 2763 is 1.16125  
loss batch it 2764 is 0.98766  
loss batch it 2765 is 0.95134  
loss batch it 2766 is 0.98472  
loss batch it 2767 is 1.39751  
loss batch it 2768 is 0.89029  
loss batch it 2769 is 1.06135  
loss batch it 2770 is 1.40764  
loss batch it 2771 is 0.89647  
loss batch it 2772 is 0.92601  
loss batch it 2773 is 1.09214  
loss batch it 2774 is 1.47954  
loss batch it 2775 is 0.96390  
loss batch it 2776 is 1.00390  
loss batch it 2777 is 0.86568  
loss batch it 2778 is 1.55986  
loss batch it 2779 is 0.84827  
loss batch it 2780 is 0.90698  
loss batch it 2781 is 0.94690  
loss batch it 2782 is 0.85966  
loss batch it 2783 is 0.87266  
loss batch it 2784 is 0.87779  
loss batch it 2785 is 0.83937  
loss batch it 2786 is 0.93525  
loss batch it 2787 is 0.84053  
loss batch it 2788 is 0.86068  
loss batch it 2789 is 1.03862  
loss batch it 2790 is 0.95534  
loss batch it 2791 is 0.94346  
loss batch it 2792 is 0.86377  
loss batch it 2793 is 0.88603  
loss batch it 2794 is 0.84187  
loss batch it 2795 is 0.82311  
loss batch it 2796 is 0.87722  
loss batch it 2797 is 1.02649  
loss batch it 2798 is 0.99893  
loss batch it 2799 is 0.81530  
loss batch it 2800 is 0.85071  
loss batch it 2801 is 0.96402  
loss batch it 2802 is 0.85210  
loss batch it 2803 is 1.56217  
loss batch it 2804 is 1.74852  
loss batch it 2805 is 0.81633  
loss batch it 2806 is 0.86659  
loss batch it 2807 is 0.92490  
loss batch it 2808 is 0.86002  
loss batch it 2809 is 1.03070  
loss batch it 2810 is 0.88794  
loss batch it 2811 is 0.92085  
loss batch it 2812 is 0.78785  
loss batch it 2813 is 0.83183  
loss batch it 2814 is 0.82210  
loss batch it 2815 is 0.84186  
loss batch it 2816 is 0.89319  
loss batch it 2817 is 0.88096  
loss batch it 2818 is 1.06744  
loss batch it 2819 is 0.84715  
loss batch it 2820 is 0.95957  
loss batch it 2821 is 0.81934  
loss batch it 2822 is 0.80954  
loss batch it 2823 is 0.82290  
loss batch it 2824 is 1.10571  
loss batch it 2825 is 1.14600  
loss batch it 2826 is 1.55145  
loss batch it 2827 is 1.04613  
loss batch it 2828 is 0.87242  
loss batch it 2829 is 0.82872  
loss batch it 2830 is 0.96465  
loss batch it 2831 is 0.90868  
loss batch it 2832 is 0.87262  
loss batch it 2833 is 0.85153  
loss batch it 2834 is 0.81196  
loss batch it 2835 is 0.80514  
loss batch it 2836 is 0.78589  
loss batch it 2837 is 0.87550  
loss batch it 2838 is 0.80008  
loss batch it 2839 is 0.86223  
loss batch it 2840 is 0.84045  
loss batch it 2841 is 0.93062  
loss batch it 2842 is 0.84874  
loss batch it 2843 is 0.86424  
loss batch it 2844 is 0.82674  
loss batch it 2845 is 0.97517  
loss batch it 2846 is 0.93374  
loss batch it 2847 is 0.83360  
loss batch it 2848 is 0.94704  
loss batch it 2849 is 0.79808  
loss batch it 2850 is 1.00888  
loss batch it 2851 is 0.92491  
loss batch it 2852 is 0.80498  
loss batch it 2853 is 0.82555  
loss batch it 2854 is 1.05716  
loss batch it 2855 is 0.84577  
loss batch it 2856 is 0.80517  
loss batch it 2857 is 0.89539  
loss batch it 2858 is 1.14331  
loss batch it 2859 is 1.00854  
loss batch it 2860 is 0.77347  
loss batch it 2861 is 0.85057  
loss batch it 2862 is 0.95193  
loss batch it 2863 is 1.01025  
loss batch it 2864 is 0.79785  
loss batch it 2865 is 0.96882  
loss batch it 2866 is 0.79764  
loss batch it 2867 is 0.89938  
loss batch it 2868 is 0.88683  
loss batch it 2869 is 1.25857  
loss batch it 2870 is 0.97838  
loss batch it 2871 is 1.01819  
loss batch it 2872 is 0.95121  
loss batch it 2873 is 0.91153  
loss batch it 2874 is 0.90807  
loss batch it 2875 is 1.01168  
loss batch it 2876 is 0.83921  
loss batch it 2877 is 0.75220  
loss batch it 2878 is 0.84743  
loss batch it 2879 is 0.86288  
loss batch it 2880 is 0.81303  
loss batch it 2881 is 1.00243  
loss batch it 2882 is 0.84816  
loss batch it 2883 is 0.87434  
loss batch it 2884 is 0.78825  
loss batch it 2885 is 0.76471  
loss batch it 2886 is 0.89624  
loss batch it 2887 is 1.34940  
loss batch it 2888 is 0.82347  
loss batch it 2889 is 1.46628  
loss batch it 2890 is 0.82372  
loss batch it 2891 is 0.83920  
loss batch it 2892 is 0.87561  
loss batch it 2893 is 0.73301  
loss batch it 2894 is 0.81728  
loss batch it 2895 is 1.09188  
loss batch it 2896 is 0.97807  
loss batch it 2897 is 0.79831  
loss batch it 2898 is 1.17658  
loss batch it 2899 is 0.87255  
loss batch it 2900 is 0.85347  
loss batch it 2901 is 0.84041  
loss batch it 2902 is 0.75425  
loss batch it 2903 is 0.79835  
loss batch it 2904 is 0.73983  
loss batch it 2905 is 0.95390  
loss batch it 2906 is 0.90805  
loss batch it 2907 is 0.85533  
loss batch it 2908 is 1.18092  
loss batch it 2909 is 0.77374  
loss batch it 2910 is 0.94383  
loss batch it 2911 is 1.64335  
loss batch it 2912 is 1.13988  
loss batch it 2913 is 1.10356  
loss batch it 2914 is 1.05030  
loss batch it 2915 is 1.04310  
loss batch it 2916 is 0.84641  
loss batch it 2917 is 0.90130  
loss batch it 2918 is 0.92793  
loss batch it 2919 is 0.82527  
loss batch it 2920 is 0.78696  
loss batch it 2921 is 0.87371  
loss batch it 2922 is 0.89004  
loss batch it 2923 is 0.82637  
loss batch it 2924 is 0.79851  
loss batch it 2925 is 0.86021  
loss batch it 2926 is 0.81227  
loss batch it 2927 is 0.86314  
loss batch it 2928 is 0.74771  
loss batch it 2929 is 0.81204  
loss batch it 2930 is 0.95431  
loss batch it 2931 is 0.84975  
loss batch it 2932 is 0.75589  
loss batch it 2933 is 0.79681  
loss batch it 2934 is 0.83856  
loss batch it 2935 is 0.80227  
loss batch it 2936 is 1.13074  
loss batch it 2937 is 0.95488  
loss batch it 2938 is 0.85394  
loss batch it 2939 is 0.86848  
loss batch it 2940 is 1.12985  
loss batch it 2941 is 0.80164  
loss batch it 2942 is 0.83436  
loss batch it 2943 is 0.95778  
loss batch it 2944 is 0.80265  
loss batch it 2945 is 0.86784  
loss batch it 2946 is 0.86071  
loss batch it 2947 is 0.84270  
loss batch it 2948 is 0.80542  
loss batch it 2949 is 0.85243  
loss batch it 2950 is 0.78663  
loss batch it 2951 is 0.85211  
loss batch it 2952 is 0.77030  
loss batch it 2953 is 0.83138  
loss batch it 2954 is 0.82331  
loss batch it 2955 is 0.79665  
loss batch it 2956 is 0.83824  
loss batch it 2957 is 0.79350  
loss batch it 2958 is 1.02392  
loss batch it 2959 is 1.10970  
loss batch it 2960 is 0.87197  
loss batch it 2961 is 1.26882  
loss batch it 2962 is 1.37371  
loss batch it 2963 is 0.89097  
loss batch it 2964 is 0.87763  
loss batch it 2965 is 1.39492  
loss batch it 2966 is 0.84188  
loss batch it 2967 is 1.01972  
loss batch it 2968 is 0.86684  
loss batch it 2969 is 0.76246  
loss batch it 2970 is 1.69061  
loss batch it 2971 is 1.95682  
loss batch it 2972 is 0.83260  
loss batch it 2973 is 0.95239  
loss batch it 2974 is 0.93123  
loss batch it 2975 is 0.90702  
loss batch it 2976 is 0.85987  
loss batch it 2977 is 0.92759  
loss batch it 2978 is 0.77372  
loss batch it 2979 is 0.85784  
loss batch it 2980 is 0.83157  
loss batch it 2981 is 0.80639  
loss batch it 2982 is 0.94677  
loss batch it 2983 is 0.85143  
loss batch it 2984 is 0.75076  
loss batch it 2985 is 0.91043  
loss batch it 2986 is 0.95823  
loss batch it 2987 is 1.11657  
loss batch it 2988 is 1.16575  
loss batch it 2989 is 0.79626  
loss batch it 2990 is 0.92987  
loss batch it 2991 is 0.85844  
loss batch it 2992 is 0.79201  
loss batch it 2993 is 0.86121  
loss batch it 2994 is 0.85214  
loss batch it 2995 is 0.87049  
loss batch it 2996 is 0.80103  
loss batch it 2997 is 0.79168  
loss batch it 2998 is 0.77584  
loss batch it 2999 is 0.86287  
In [20]:
#Let's see what it looks like on one of the validation sets...
font_size = 10
mygen = myGenerator(batch_size = 4, num_batches = 10)

mxtrain, mytrain, mxvalid, myvalid = next(mygen)

predicted_wall = ynet(mxvalid)

fig, axes = plt.subplots(nrows=4, ncols=3, figsize = (10,10))
for row in range(4):
    axes[row,0].plot(mxvalid[0][row,:])
    vval = mxvalid[1][row,0]
    pval = mxvalid[1][row,1]
    
    axes[row,0].set_title('Input, V = {:.2f} {:.2f}ms'.format(vval, pval), fontsize = 10)
    
    axes[row,1].set_title('Correct Output', fontsize = font_size)
    axes[row,1].plot(myvalid[row][:])
    
    axes[row,2].plot(predicted_wall[:,:-2][row], 'b-', label = 'prediction')
    axes[row,2].plot(myvalid[row][:], 'r-', label = 'valid')
    axes[row,2].set_title('Predicted Output', fontsize = font_size)
    axes[row,2].legend(loc = [1.0, 0.5])
    
fig.tight_layout()
In [21]:
import pandas as pd
import seaborn as sns
#Let's take one prediction, and see how it varies with upping the bias
outputs = []

test_images, test_actions = [], []

#test_image = mxvalid[0][1]
test_image = np.full(14,-0.1)

voltages = [-1.0,-0.9,-0.8,-0.7,-0.6,-0.5,-0.4,-0.3,-0.2,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]
#voltages = [0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]

pwidths = [0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]


for v in voltages:
    for p in pwidths:
        test_action_val_new_valv=v
        test_action_val_new_valp=p    
        test_actions.append([test_action_val_new_valv, test_action_val_new_valp])
        test_images.append(test_image)
test_input = [tf.stack(test_images), tf.stack(test_actions)] 

output = ynet(test_input)

df = pd.DataFrame(output.numpy())
maxValues = df.abs().max(axis = 1)
maxValues=maxValues*7.8125 #convert to real values (nm)

new_actions = []
for i in range(len(test_actions)):
    row = test_actions[i]
    row = np.array(row)
    row[0]=row[0]*10 #convert to real values (V)
    temp=row[1]*300 #convert to real values (ms)
    row[1]=round(temp)
    new_actions.append(row)
    
actionsdf = pd.DataFrame (new_actions, columns = ['Voltage (V) ','Pulse Width (ms)'])
actionsdf=actionsdf.merge(maxValues.to_frame(), left_index=True, right_index=True)

table = actionsdf.pivot('Pulse Width (ms)', 'Voltage (V) ')
ax = sns.heatmap(table[0],cbar_kws={'label': 'Displacement (nm)'})
og_table=table[0]
ax.invert_yaxis()
plt.yticks(rotation = 0)
plt.xticks(fontsize=7,rotation=0)
plt.xlabel('Voltage (V) ',fontweight='bold')
plt.ylabel('Pulse Width (ms) ',fontweight='bold')
plt.show()
In [22]:
out = np.array(output[139])
out = np.pad(out, pad_width=1)
plt.plot(out)
Out[22]:
[<matplotlib.lines.Line2D at 0x7f85cc0ee520>]
In [23]:
import pandas as pd
import seaborn as sns
#Let's take one prediction, and see how it varies with upping the bias
outputs = []

test_images, test_actions = [], []

#test_image = mxvalid[0][1]
test_image = out
#test_image = row

voltages = [-1.0,-0.9,-0.8,-0.7,-0.6,-0.5,-0.4,-0.3,-0.2,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]

pwidths = [0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]


for v in voltages:
    for p in pwidths:
        test_action_val_new_valv=v
        test_action_val_new_valp=p    
        test_actions.append([test_action_val_new_valv, test_action_val_new_valp])
        test_images.append(test_image)
test_input = [tf.stack(test_images), tf.stack(test_actions)] 

output = ynet(test_input)

df = pd.DataFrame(output.numpy())
maxValues = df.abs().max(axis = 1)
maxValues=maxValues*7.8125

new_actions = []
for i in range(len(test_actions)):
    row = test_actions[i]
    row = np.array(row)
    row[0]=row[0]*10
    temp=row[1]*300
    row[1]=round(temp)
    new_actions.append(row)
    
actionsdf = pd.DataFrame (new_actions, columns = ['Voltage (V) ','Pulse Width (ms)'])
actionsdf=actionsdf.merge(maxValues.to_frame(), left_index=True, right_index=True)

table = actionsdf.pivot('Pulse Width (ms)', 'Voltage (V) ')
ax = sns.heatmap(table[0],cbar_kws={'label': 'Displacement (nm)'})
bulged_table = table[0]
ax.invert_yaxis()
plt.yticks(rotation = 0)
plt.xticks(fontsize=7,rotation=0)
plt.xlabel('Voltage (V) ',fontweight='bold')
plt.ylabel('Pulse Width (ms) ',fontweight='bold')
plt.show()
In [ ]:
 
In [ ]:
 
In [ ]:
 
In [ ]:

Test on datasets seperately¶

In [24]:
#old data
pix = 128
reset_freq = 10
max_bias = 10
max_pw = 300
window_size=3

local_win_size = 7
min_ind = 0.046875 
max_ind = 0.9

phase_images = []
amp_images = []

for ind in range(len(data_collected)):
    output=np.asarray(data_collected[ind])
    amp_img = output[2].reshape(-1, pix*2)
    phase_img = output[3].reshape(-1, pix*2)

    amp_images.append(amp_img[:,:pix])
    phase_images.append(phase_img[:,:pix]) 

l=0

actions = []
actions_norm = []

index_tracker =[]

for ind in range(len(data_collected)):

    
    if ind%reset_freq!=0:
        xpos,ypos = wall_bias_locs[l][3], wall_bias_locs[l][4] 
        bias_amp, bias_pw = wall_bias_locs[l][1], wall_bias_locs[l][2]
        #encoded_row_location = ypos.astype(int)
        #encoded_info = encoded_old[l][encoded_row_location]
        #zero = encoded_info[0]
        #one = encoded_info[1]
        #two = encoded_info[2]
        #three = encoded_info[3]

        xpos_norm = xpos/pix
        ypos_norm = ypos/pix
        bias_amp_norm = bias_amp/max_bias
        bias_pw_norm = bias_pw/max_pw
        index_tracker.append((ind,l))
        l+=1
    else:
        xpos = np.nan
        ypos = np.nan
        xpos_norm = np.nan
        ypos_norm = np.nan
        bias_amp = np.nan
        bias_pw = np.nan
        bias_amp_norm = np.nan
        bias_pw_norm = np.nan
        #encoded_info = np.nan
        zero = np.nan
        one = np.nan
        two = np.nan
        three = np.nan
    
    actions.append([xpos,ypos, bias_amp, bias_pw])
    actions_norm.append([xpos_norm, ypos_norm, bias_amp_norm, bias_pw_norm])
    #actions.append([xpos,ypos, bias_amp, bias_pw,zero,one,two,three])
    #actions_norm.append([xpos_norm, ypos_norm, bias_amp_norm, bias_pw_norm,zero,one,two,three])

phase_images_segmented = np.copy(amp_images)
phase_images_segmented = normalize_images(phase_images_segmented)
phase_images_segmented[phase_images_segmented<0.2] = 0
phase_images_segmented[phase_images_segmented>=0.4] = 1


transitions = []
transitions_norm = []
transitions_profiles = []
for ind in range(1, len(phase_images)):
    tnew = namedtuple('Transition', ['state','action', 'next_state'])
    tnew_norm = namedtuple('Transition', ['state','action', 'next_state'])
    tnew_prof = namedtuple('Transition', ['state','action', 'next_state'])
    
    tnew.state = phase_images_segmented[ind-1]
    tnew.next_state = phase_images_segmented[ind]
    tnew.action = actions[ind]
    
    tnew_norm.state = phase_images_segmented[ind-1]
    tnew_norm.next_state = phase_images_segmented[ind]
    tnew_norm.action = actions_norm[ind]
    
    state = tnew.state
    next_state = tnew.next_state
    shift, error, diffphase = phase_cross_correlation(state, next_state,
                                                  upsample_factor=3)
    offset_image = fourier_shift(np.fft.fftn(next_state), shift)
    offset_image = np.fft.ifftn(offset_image)
    next_state = offset_image.real
    wps = return_norm_wall_loc(state, window_size=window_size) + start_pix
    wps_next = return_norm_wall_loc(next_state,window_size=window_size) + start_pix
    
    tnew_prof.state = wps
    tnew_prof.action = tnew_norm.action
    tnew_prof.next_state = wps_next
    
    if not np.isnan(tnew.action[0]) and not np.isnan(actions[ind+1][0]):
        transitions.append(tnew)
        transitions_norm.append(tnew_norm)
        transitions_profiles.append(tnew_prof)
local_win_size = 7
offset=5
local_state_size = 14
train_fraction = 0.80
num_training_points = len(transitions_norm)

train_split_indices = np.random.choice(np.arange(len(transitions_norm)),
                                       (int(num_training_points*train_fraction)),
                                       replace = False)

test_split_indices = [val for val in np.arange(len(transitions_norm)) if val not in train_split_indices]

#Once we have the indices we need to make the training data. X_train, y_train, X_test, y_test
#X_train is the action, state, y_train is the state+1
#Same goes for X_test and y_test
X_train, y_train, X_test, y_test = [], [], [], []

for train_ind in train_split_indices:
    transition = transitions_norm[train_ind]
    trans_profile = transitions_profiles[train_ind]
    if transition.action[1] > min_ind and transition.action[1] < max_ind:
        wall_pos = transition.action[1]*128 + offset
        wall = np.array(trans_profile.state)
        local_wall = wall[int(wall_pos - local_win_size): int(wall_pos + local_win_size)]
        local_wall = local_wall - np.mean(local_wall)
        lwall = np.zeros(local_state_size)
        lwall[:len(local_wall)] = local_wall
        lwall[len(local_wall):] = local_wall[-1]
        X_train.append([lwall, transition.action[2:]])
        difference_profile = trans_profile.next_state - trans_profile.state
        difference_profile[difference_profile>10]=10.0
        difference_profile[difference_profile<-10]=-10.0
        difference_profile = difference_profile/10
        difference_profile = difference_profile[int(wall_pos - local_win_size): int(wall_pos + local_win_size)]
        dprof = np.zeros(local_state_size)
        dprof[:len(difference_profile)] = difference_profile
        dprof[len(difference_profile):] = difference_profile[-1]
        y_train.append(smooth_window(dprof,window_size=3))

for test_ind in test_split_indices:
    transition = transitions_norm[test_ind]
    trans_profile = transitions_profiles[test_ind]
    if transition.action[1] > min_ind and transition.action[1] < max_ind:
        wall_pos = transition.action[1]*128 + offset
        wall = np.array(trans_profile.state)
        local_wall = wall[int(wall_pos - local_win_size): int(wall_pos + local_win_size)]
        local_wall = local_wall - np.mean(local_wall)
        lwall = np.zeros(local_state_size)
        lwall[:len(local_wall)] = local_wall
        lwall[len(local_wall):] = local_wall[-1]
        X_test.append([lwall, transition.action[2:]])
        difference_profile = trans_profile.next_state - trans_profile.state
        difference_profile[difference_profile>10]=10.0
        difference_profile[difference_profile<-10]=-10.0
        difference_profile = difference_profile/10
        difference_profile = difference_profile[int(wall_pos - local_win_size): int(wall_pos + local_win_size)]
        dprof = np.zeros(local_state_size)
        dprof[:len(difference_profile)] = difference_profile
        dprof[len(difference_profile):] = difference_profile[-1]
        y_test.append(smooth_window(dprof,window_size=3))
    
def myGenerator(batch_size = 16, num_batches = 32, image_noise = 0.001,action_noise = 0.001):
    batch_num = 0
    while batch_num < num_batches:
        
        train_data_slice = np.random.choice(np.arange(len(X_train)),size = batch_size, replace = False)
        validation_data_slice = np.random.choice(np.arange(len(X_test)),
                                                 size = min(8,batch_size), replace = False)
        
        xtrain = [X_train[int(val)] for val in train_data_slice]
        ytrain = [y_train[int(val)] for val in train_data_slice]
        
        xtest = [X_test[int(val)] for val in validation_data_slice]
        ytest = [y_test[int(val)] for val in validation_data_slice]
        
        #Convert to tensorflow arrays - training data
        xtrain_images = np.zeros(shape=(batch_size, xtrain[0][0].shape[0]))
        for ind in range(len(train_data_slice)): 
            xtrain_images[ind,:] = xtrain[ind][0] + \
            np.random.normal(loc=0.0, scale = image_noise, size=(len(xtrain[ind][0])))                  
        xtrain_images = tf.stack(xtrain_images)
        
        xtrain_actions = np.zeros(shape=(batch_size, len(xtrain[0][1])))
        #print(xtrain_actions[ind,:], xtrain[1][1])
        for ind in range(len(train_data_slice)): 
            xtrain_actions[ind,:] = xtrain[ind][1]
            
        xtrain_actions = tf.stack(xtrain_actions)
        xtrain = [xtrain_images[:,:,None], xtrain_actions]
        
        #Convert to tensorflow arrays - testing data
        xtest_images = np.zeros(shape=((len(xtest)), xtest[0][0].shape[0]))
        for ind in range(len(validation_data_slice)): 
            xtest_images[ind,:] = xtest[ind][0] + \
            np.random.normal(loc=0.0, scale = image_noise, size=(len(xtest[ind][0])))                    
        xtest_images = tf.stack(xtest_images)
        
        xtest_actions = np.zeros(shape=(len(xtest), len(xtest[0][1])))
        for ind in range(len(validation_data_slice)): 
            #xtest_actions[ind,:] = xtest[ind][1] + np.random.normal(loc=0.0, scale = action_noise,size=(6))
            xtest_actions[ind,:] = xtest[ind][1] + np.random.normal(loc=0.0, scale = action_noise,size=(2))
            
        xtest_actions = tf.stack(xtest_actions)
        
        xtest = [xtest_images[:,:,None], xtest_actions]
        
        yield xtrain, tf.stack(ytrain), xtest, tf.stack(ytest)
        batch_num+=1
        
In [25]:
#Let's see what it looks like on one of the validation sets...
font_size = 10
mygen = myGenerator(batch_size = 12, num_batches = 10)

mxtrain, mytrain, mxvalid, myvalid = next(mygen)

predicted_wall = ynet(mxvalid)

fig, axes = plt.subplots(nrows=8, ncols=3, figsize = (10,10))
for row in range(8):
    axes[row,0].plot(mxvalid[0][row,:])
    vval = mxvalid[1][row,0]
    pval = mxvalid[1][row,1]
    
    axes[row,0].set_title('Input, V = {:.2f} {:.2f}ms'.format(vval, pval), fontsize = 10)
    
    axes[row,1].set_title('Correct Output', fontsize = font_size)
    axes[row,1].plot(myvalid[row][:])
    
    axes[row,2].plot(predicted_wall[:,:-2][row], 'b-', label = 'prediction')
    axes[row,2].plot(myvalid[row][:], 'r-', label = 'valid')
    axes[row,2].set_title('Predicted Output', fontsize = font_size)
    axes[row,2].legend(loc = [1.0, 0.5])
    
fig.tight_layout()
In [26]:
#Let's see what it looks like on one of the validation sets...
font_size = 10
mygen = myGenerator(batch_size = 12, num_batches = 10)

mxtrain, mytrain, mxvalid, myvalid = next(mygen)

predicted_wall = ynet(mxvalid)

fig, axes = plt.subplots(nrows=8, ncols=3, figsize = (10,10))
for row in range(8):
    axes[row,0].plot(mxvalid[0][row,:])
    vval = mxvalid[1][row,0]
    pval = mxvalid[1][row,1]
    
    axes[row,0].set_title('Input, V = {:.2f} {:.2f}ms'.format(vval, pval), fontsize = 10)
    
    axes[row,1].set_title('Correct Output', fontsize = font_size)
    axes[row,1].plot(myvalid[row][:])
    
    axes[row,2].plot(predicted_wall[:,:][row], 'b-', label = 'prediction')
    axes[row,2].plot(myvalid[row][:], 'r-', label = 'valid')
    axes[row,2].set_title('Predicted Output', fontsize = font_size)
    axes[row,2].legend(loc = [1.0, 0.5])
    
fig.tight_layout()
In [27]:
#new data
l=0
bpw = []
bamp = []
for ind in range(1,301): 
    
    bias_amp, bias_pw = all_img_bias[l][1], all_img_bias[l][2]
    l+=1
    bpw.append(bias_pw)
    bamp.append(bias_amp)
    
l=0
actions_new = []
actions_norm_new = []
index_tracker_new =[]

for ind in range(1,301): 
    
     
    xpos,ypos = all_img_bias[l][-2], all_img_bias[l][-1] 
    bias_amp, bias_pw = all_img_bias[l][1], all_img_bias[l][2]
    bias_pw = bias_pw*1000
    
    xpos_norm = xpos/pix
    ypos_norm = ypos/pix
    bias_amp_norm = bias_amp/10
    bias_pw_norm = bias_pw/500
    
    index_tracker.append((ind,l))
    l+=1

    
    actions_new.append([xpos,ypos, bias_amp, bias_pw])
    actions_norm_new.append([xpos_norm, ypos_norm, bias_amp_norm, bias_pw_norm])

phase_images_segmented = np.copy(amp_images_new)
phase_images_segmented = normalize_images(phase_images_segmented)
phase_images_segmented[phase_images_segmented<0.2] = 0
phase_images_segmented[phase_images_segmented>=0.4] = 1


transitions = []
transitions_norm = []
transitions_profiles = []
for ind in range(1, len(phase_images_new)):
    tnew = namedtuple('Transition', ['state','action', 'next_state'])
    tnew_norm = namedtuple('Transition', ['state','action', 'next_state'])
    tnew_prof = namedtuple('Transition', ['state','action', 'next_state'])
    
    tnew.state = phase_images_segmented[ind-1]
    tnew.next_state = phase_images_segmented[ind]
    tnew.action = actions_new[ind]
    
    tnew_norm.state = phase_images_segmented[ind-1]
    tnew_norm.next_state = phase_images_segmented[ind]
    tnew_norm.action = actions_norm_new[ind]
    
    state = tnew.state
    next_state = tnew.next_state
    shift, error, diffphase = phase_cross_correlation(state, next_state,
                                                  upsample_factor=3)
    offset_image = fourier_shift(np.fft.fftn(next_state), shift)
    offset_image = np.fft.ifftn(offset_image)
    next_state = offset_image.real
    wps = return_norm_wall_loc(state, window_size=window_size) + start_pix
    wps_next = return_norm_wall_loc(next_state,window_size=window_size) + start_pix
    
    tnew_prof.state = wps
    tnew_prof.action = tnew_norm.action
    tnew_prof.next_state = wps_next
    
    #if not np.isnan(tnew.action[0]) and not np.isnan(actions[ind+1][0]):
    transitions.append(tnew)
    transitions_norm.append(tnew_norm)
    transitions_profiles.append(tnew_prof)
    
local_win_size = 7
offset=5
local_state_size = 14
train_fraction = 0.80
num_training_points = len(transitions_norm)

train_split_indices = np.random.choice(np.arange(len(transitions_norm)),
                                       (int(num_training_points*train_fraction)),
                                       replace = False)

test_split_indices = [val for val in np.arange(len(transitions_norm)) if val not in train_split_indices]

#Once we have the indices we need to make the training data. X_train, y_train, X_test, y_test
#X_train is the action, state, y_train is the state+1
#Same goes for X_test and y_test
X_train, y_train, X_test, y_test = [], [], [], []

for train_ind in train_split_indices:
    transition = transitions_norm[train_ind]
    trans_profile = transitions_profiles[train_ind]
    if transition.action[1] > min_ind and transition.action[1] < max_ind:
        wall_pos = transition.action[1]*128 + offset
        wall = np.array(trans_profile.state)
        local_wall = wall[int(wall_pos - local_win_size): int(wall_pos + local_win_size)]
        local_wall = local_wall - np.mean(local_wall)
        lwall = np.zeros(local_state_size)
        lwall[:len(local_wall)] = local_wall
        lwall[len(local_wall):] = local_wall[-1]
        X_train.append([lwall, transition.action[2:]])
        difference_profile = trans_profile.next_state - trans_profile.state
        difference_profile[difference_profile>10]=10.0
        difference_profile[difference_profile<-10]=-10.0
        difference_profile = difference_profile/10
        difference_profile = difference_profile[int(wall_pos - local_win_size): int(wall_pos + local_win_size)]
        dprof = np.zeros(local_state_size)
        dprof[:len(difference_profile)] = difference_profile
        dprof[len(difference_profile):] = difference_profile[-1]
        y_train.append(smooth_window(dprof,window_size=3))

for test_ind in test_split_indices:
    transition = transitions_norm[test_ind]
    trans_profile = transitions_profiles[test_ind]
    if transition.action[1] > min_ind and transition.action[1] < max_ind:
        wall_pos = transition.action[1]*128 + offset
        wall = np.array(trans_profile.state)
        local_wall = wall[int(wall_pos - local_win_size): int(wall_pos + local_win_size)]
        local_wall = local_wall - np.mean(local_wall)
        lwall = np.zeros(local_state_size)
        lwall[:len(local_wall)] = local_wall
        lwall[len(local_wall):] = local_wall[-1]
        X_test.append([lwall, transition.action[2:]])
        difference_profile = trans_profile.next_state - trans_profile.state
        difference_profile[difference_profile>10]=10.0
        difference_profile[difference_profile<-10]=-10.0
        difference_profile = difference_profile/10
        difference_profile = difference_profile[int(wall_pos - local_win_size): int(wall_pos + local_win_size)]
        dprof = np.zeros(local_state_size)
        dprof[:len(difference_profile)] = difference_profile
        dprof[len(difference_profile):] = difference_profile[-1]
        y_test.append(smooth_window(dprof,window_size=3))
    
def myGenerator(batch_size = 16, num_batches = 32, image_noise = 0.001,action_noise = 0.001):
    batch_num = 0
    while batch_num < num_batches:
        
        train_data_slice = np.random.choice(np.arange(len(X_train)),size = batch_size, replace = False)
        validation_data_slice = np.random.choice(np.arange(len(X_test)),
                                                 size = min(8,batch_size), replace = False)
        
        xtrain = [X_train[int(val)] for val in train_data_slice]
        ytrain = [y_train[int(val)] for val in train_data_slice]
        
        xtest = [X_test[int(val)] for val in validation_data_slice]
        ytest = [y_test[int(val)] for val in validation_data_slice]
        
        #Convert to tensorflow arrays - training data
        xtrain_images = np.zeros(shape=(batch_size, xtrain[0][0].shape[0]))
        for ind in range(len(train_data_slice)): 
            xtrain_images[ind,:] = xtrain[ind][0] + \
            np.random.normal(loc=0.0, scale = image_noise, size=(len(xtrain[ind][0])))                  
        xtrain_images = tf.stack(xtrain_images)
        
        xtrain_actions = np.zeros(shape=(batch_size, len(xtrain[0][1])))
        #print(xtrain_actions[ind,:], xtrain[1][1])
        for ind in range(len(train_data_slice)): 
            xtrain_actions[ind,:] = xtrain[ind][1]
            
        xtrain_actions = tf.stack(xtrain_actions)
        xtrain = [xtrain_images[:,:,None], xtrain_actions]
        
        #Convert to tensorflow arrays - testing data
        xtest_images = np.zeros(shape=((len(xtest)), xtest[0][0].shape[0]))
        for ind in range(len(validation_data_slice)): 
            xtest_images[ind,:] = xtest[ind][0] + \
            np.random.normal(loc=0.0, scale = image_noise, size=(len(xtest[ind][0])))                    
        xtest_images = tf.stack(xtest_images)
        
        xtest_actions = np.zeros(shape=(len(xtest), len(xtest[0][1])))
        for ind in range(len(validation_data_slice)): 
            #xtest_actions[ind,:] = xtest[ind][1] + np.random.normal(loc=0.0, scale = action_noise,size=(6))
            xtest_actions[ind,:] = xtest[ind][1] + np.random.normal(loc=0.0, scale = action_noise,size=(2))
            
        xtest_actions = tf.stack(xtest_actions)
        
        xtest = [xtest_images[:,:,None], xtest_actions]
        
        yield xtrain, tf.stack(ytrain), xtest, tf.stack(ytest)
        batch_num+=1    
In [28]:
#Let's see what it looks like on one of the validation sets...
font_size = 10
mygen = myGenerator(batch_size = 12, num_batches = 10)

mxtrain, mytrain, mxvalid, myvalid = next(mygen)

predicted_wall = ynet(mxvalid)

fig, axes = plt.subplots(nrows=8, ncols=3, figsize = (10,10))
for row in range(8):
    axes[row,0].plot(mxvalid[0][row,:])
    vval = mxvalid[1][row,0]
    pval = mxvalid[1][row,1]
    
    axes[row,0].set_title('Input, V = {:.2f} {:.2f}ms'.format(vval, pval), fontsize = 10)
    
    axes[row,1].set_title('Correct Output', fontsize = font_size)
    axes[row,1].plot(myvalid[row][:])
    
    axes[row,2].plot(predicted_wall[:,:-2][row], 'b-', label = 'prediction')
    axes[row,2].plot(myvalid[row][:], 'r-', label = 'valid')
    axes[row,2].set_title('Predicted Output', fontsize = font_size)
    axes[row,2].legend(loc = [1.0, 0.5])
    
fig.tight_layout()
In [29]:
#Let's see what it looks like on one of the validation sets...
font_size = 10
mygen = myGenerator(batch_size = 12, num_batches = 10)

mxtrain, mytrain, mxvalid, myvalid = next(mygen)

predicted_wall = ynet(mxvalid)

fig, axes = plt.subplots(nrows=8, ncols=3, figsize = (10,10))
for row in range(8):
    axes[row,0].plot(mxvalid[0][row,:])
    vval = mxvalid[1][row,0]
    pval = mxvalid[1][row,1]
    
    axes[row,0].set_title('Input, V = {:.2f} {:.2f}s'.format(vval, pval), fontsize = 10)
    
    axes[row,1].set_title('Correct Output', fontsize = font_size)
    axes[row,1].plot(myvalid[row][:])
    
    axes[row,2].plot(predicted_wall[:,:][row], 'b-', label = 'Model Prediction')
    axes[row,2].plot(myvalid[row][:], 'r-', label = 'Observed')
    axes[row,2].set_title('Predicted Output', fontsize = font_size)
    axes[row,2].legend(loc = [1.01, 0.15])
    
fig.tight_layout()
In [30]:
#Let's see what it looks like on one of the validation sets...
font_size = 10
mygen = myGenerator(batch_size = 12, num_batches = 10)

mxtrain, mytrain, mxvalid, myvalid = next(mygen)

predicted_wall = ynet(mxvalid)

fig, axes = plt.subplots(nrows=8, ncols=3, figsize = (10,10))
for row in range(8):
    axes[row,0].plot(mxvalid[0][row,:])
    vval = mxvalid[1][row,0]
    pval = mxvalid[1][row,1]
    
    axes[row,0].set_title('Input, V = {:.2f} {:.2f}ms'.format(vval, pval), fontsize = 10)
    
    axes[row,1].set_title('Correct Output', fontsize = font_size)
    axes[row,1].plot(myvalid[row][:])
    
    axes[row,2].plot(predicted_wall[:,:][row], 'b-', label = 'prediction')
    axes[row,2].plot(myvalid[row][:], 'r-', label = 'valid')
    axes[row,2].set_title('Predicted Output', fontsize = font_size)
    axes[row,2].legend(loc = [1.0, 0.5])
    
fig.tight_layout()

Wall profiles as function of V and PW¶

In [31]:
outputs = []
test_images, test_actions = [], []

#test_image = mxvalid[0][1]
test_image = np.full(14,-0.1)
#test_image = row

voltages = [-1.0,-0.9,-0.8,-0.7,-0.6,-0.5,-0.4,-0.3,-0.2,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]
pwidths = [0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]


for v in voltages:
    #for p in pwidths:
        test_action_val_new_valv=v
        test_action_val_new_valp=0.5    
        test_actions.append([test_action_val_new_valv, test_action_val_new_valp])

        test_images.append(test_image)

test_input = [tf.stack(test_images), tf.stack(test_actions)] 

output = ynet(test_input)

df = pd.DataFrame(output.numpy())
df = df*7.8125
#maxValues = df.abs().max(axis = 1)
#maxValues=maxValues*7.8125

new_actions = []
for i in range(len(test_actions)):
    row = test_actions[i]
    row = np.array(row)
    row[0]=row[0]*10
    temp=row[1]*500
    row[1]=round(temp)
    new_actions.append(row)

actionsdf = pd.DataFrame (new_actions, columns = ['Voltage (V) ','Pulse Width (ms)'])
actionsdf=actionsdf.merge(df, left_index=True, right_index=True)

labels = [-10,-9,-8,-7,-6,-5,-4,-3,-2,2,3,4,5,6,7,8,9,10]
for i in range(18):
    plt.plot(df.T[i], label=labels[i])
    plt.legend(loc=(1,0), title='Voltage V')
    plt.ylabel("Displacement (nm)")
In [32]:
outputs = []
test_images, test_actions = [], []

#test_image = mxvalid[0][1]
test_image = np.full(14,0.5)
#test_image = row

voltages = [-1.0,-0.9,-0.8,-0.7,-0.6,-0.5,-0.4,-0.3,-0.2,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]
pwidths = [0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]


for v in voltages:
    #for p in pwidths:
        test_action_val_new_valv=v
        test_action_val_new_valp=0.5    
        test_actions.append([test_action_val_new_valv, test_action_val_new_valp])

        test_images.append(test_image)

test_input = [tf.stack(test_images), tf.stack(test_actions)] 

output = ynet(test_input)

df = pd.DataFrame(output.numpy())
df = df*7.8125
#maxValues = df.abs().max(axis = 1)
#maxValues=maxValues*7.8125

new_actions = []
for i in range(len(test_actions)):
    row = test_actions[i]
    row = np.array(row)
    row[0]=row[0]*10
    temp=row[1]*500
    row[1]=round(temp)
    new_actions.append(row)

actionsdf = pd.DataFrame (new_actions, columns = ['Voltage (V) ','Pulse Width (ms)'])
actionsdf=actionsdf.merge(df, left_index=True, right_index=True)

labels = [-10,-9,-8,-7,-6,-5,-4,-3,-2,2,3,4,5,6,7,8,9,10]
for i in range(18):
    plt.plot(df.T[i], label=labels[i])
    plt.legend(loc=(1,0), title='Voltage V')
    plt.ylabel("Displacement (nm)")
In [33]:
outputs = []
test_images, test_actions = [], []

#test_image = mxvalid[0][1]
test_image = np.full(14,0.5)
#test_image = row

voltages = [-1.0,-0.9,-0.8,-0.7,-0.6,-0.5,-0.4,-0.3,-0.2,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]
pwidths = [0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]


#for v in voltages:

for p in pwidths:
    test_action_val_new_valv=0.5
    test_action_val_new_valp=p    
    test_actions.append([test_action_val_new_valv, test_action_val_new_valp])

    test_images.append(test_image)

test_input = [tf.stack(test_images), tf.stack(test_actions)] 

output = ynet(test_input)

df = pd.DataFrame(output.numpy())
df = df*7.8125
#maxValues = df.abs().max(axis = 1)
#maxValues=maxValues*7.8125

new_actions = []
for i in range(len(test_actions)):
    row = test_actions[i]
    row = np.array(row)
    row[0]=row[0]*10
    temp=row[1]*500
    row[1]=round(temp)
    new_actions.append(row)

actionsdf = pd.DataFrame (new_actions, columns = ['Voltage (V) ','Pulse Width (ms)'])
actionsdf=actionsdf.merge(df, left_index=True, right_index=True)

labels = [0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]
labels=labels*500
for i in range(9):
    plt.plot(df.T[i], label=labels[i])
    plt.legend(loc=(1,0), title='Pulse Width ms')
    plt.ylabel("Displacement (nm)")

    

arr = np.trapz((df.T-df.T.min()))
In [ ]:
 
In [34]:
out = np.array(output[6])
out = np.pad(out, pad_width=1)
plt.plot(out)
Out[34]:
[<matplotlib.lines.Line2D at 0x7f85caf15d60>]
In [35]:
outputs = []
test_images, test_actions = [], []

#test_image = mxvalid[0][1]
#test_image = np.full(14,0.5)
test_image = out

voltages = [-1.0,-0.9,-0.8,-0.7,-0.6,-0.5,-0.4,-0.3,-0.2,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]
pwidths = [0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]


for v in voltages:
    #for p in pwidths:
        test_action_val_new_valv=v
        test_action_val_new_valp=0.5    
        test_actions.append([test_action_val_new_valv, test_action_val_new_valp])

        test_images.append(test_image)

test_input = [tf.stack(test_images), tf.stack(test_actions)] 

output = ynet(test_input)

df = pd.DataFrame(output.numpy())
df = df*7.8125
#maxValues = df.abs().max(axis = 1)
#maxValues=maxValues*7.8125

new_actions = []
for i in range(len(test_actions)):
    row = test_actions[i]
    row = np.array(row)
    row[0]=row[0]*10
    temp=row[1]*500
    row[1]=round(temp)
    new_actions.append(row)

actionsdf = pd.DataFrame (new_actions, columns = ['Voltage (V) ','Pulse Width (ms)'])
actionsdf=actionsdf.merge(df, left_index=True, right_index=True)

labels = [-10.0,-9.0,-8.0,-7.0,-6.0,-5.0,-4.0,-3.0,-2.0,2.0,3.0,4.0,5.0,6.0,7.0,8.0,9.0,10.0]
for i in range(18):
    plt.plot(df.T[i], label=labels[i])
    plt.legend(loc=(1.01,-0.1), title='Voltage V')
    plt.ylabel("Displacement (nm)")
    plt.xticks([0,2,4,6,8,10],[0,2*7.8125,4*7.8125,6*7.8125,8*7.8125,10*7.8125])
    plt.xlabel("X-Position (nm)")
In [ ]:
 
In [36]:
outputs = []
test_images, test_actions = [], []

#test_image = mxvalid[0][1]
#test_image = np.full(14,0.5)
test_image = out

voltages = [-1.0,-0.9,-0.8,-0.7,-0.6,-0.5,-0.4,-0.3,-0.2,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]
pwidths = [0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]


#for v in voltages:

for p in pwidths:
    test_action_val_new_valv=-0.5
    test_action_val_new_valp=p    
    test_actions.append([test_action_val_new_valv, test_action_val_new_valp])

    test_images.append(test_image)

test_input = [tf.stack(test_images), tf.stack(test_actions)] 

output = ynet(test_input)

df = pd.DataFrame(output.numpy())
df = df*7.8125
#maxValues = df.abs().max(axis = 1)
#maxValues=maxValues*7.8125

new_actions = []
for i in range(len(test_actions)):
    row = test_actions[i]
    row = np.array(row)
    row[0]=row[0]*10
    temp=row[1]*500
    row[1]=round(temp)
    new_actions.append(row)

actionsdf = pd.DataFrame (new_actions, columns = ['Voltage (V) ','Pulse Width (ms)'])
actionsdf=actionsdf.merge(df, left_index=True, right_index=True)

labels = [0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]
labels=labels*500
for i in range(9):
    plt.plot(df.T[i], label=labels[i])
    plt.legend(loc=(1.01,0.1), title='Pulse width (ms)')
    plt.ylabel("Displacement (nm)",fontweight='bold')
    plt.xticks([0,2,4,6,8,10],[0,2*7.8125,4*7.8125,6*7.8125,8*7.8125,10*7.8125])
    plt.xlabel("X-Position (nm)",fontweight='bold')

    

arr = np.trapz((df.T-df.T.min()))
In [37]:
outputs = []
test_images, test_actions = [], []

#test_image = mxvalid[0][1]
#test_image = np.full(14,(-0.5+i/10))
test_image = out
print(test_image)
add = np.array(test_image[1:-1])
plt.plot(test_image[1:-1])
plt.show()

#voltages = [0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]
voltages = [-0.1,-0.2,-0.3,-0.4,-0.5,-0.6,-0.7,-0.8,-0.9,-1.0]
pwidths = [0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]


for v in voltages:
    #for p in pwidths:
        test_action_val_new_valv=v
        test_action_val_new_valp=0.5    
        test_actions.append([test_action_val_new_valv, test_action_val_new_valp])

        test_images.append(test_image)

test_input = [tf.stack(test_images), tf.stack(test_actions)] 

output = ynet(test_input)

df = pd.DataFrame(output.numpy())
df = df*7.8125
#maxValues = df.abs().max(axis = 1)
#maxValues=maxValues*7.8125

new_actions = []
for i in range(len(test_actions)):
    row = test_actions[i]
    row = np.array(row)
    row[0]=row[0]*10
    temp=row[1]*500
    row[1]=round(temp)
    new_actions.append(row)

actionsdf = pd.DataFrame (new_actions, columns = ['Voltage (V) ','Pulse Width (ms)'])
actionsdf=actionsdf.merge(df, left_index=True, right_index=True)

labels = [-1.0,-2.0,-3.0,-4.0,-5.0,-6.0,-7.0,-8.0,-9.0,-10.0]
#labels = [1.0,2.0,3.0,4.0,5.0,6.0,7.0,8.0,9.0,10.0]

for i in range(10):
    plotstuff = df.T[i]+add[:]
        #print(add)
    plt.plot(plotstuff, label=labels[i])
    #plt.plot(df.T[i], label=labels[i])
    plt.legend(loc=(1.01,0.2), title='Voltage V')
    plt.ylabel("Displacement (nm)",fontweight='bold')
    plt.xticks([0,2,4,6,8,10],[0,2*7.8125,4*7.8125,6*7.8125,8*7.8125,10*7.8125])
    plt.xlabel("X-Position (nm)",fontweight='bold')
plt.show()
[ 0.         -0.01609449 -0.02431608  0.00636413  0.02215511  0.04412076
  0.05582548  0.07156721  0.04594366  0.01798202  0.00879482  0.02019078
  0.04002864  0.        ]
In [38]:
outputs = []
test_images, test_actions = [], []

#test_image = mxvalid[0][1]
test_image = np.full(14,-0.1)
#test_image = out

voltages = [-1.0,-0.9,-0.8,-0.7,-0.6,-0.5,-0.4,-0.3,-0.2,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]
pwidths = [0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]


for v in voltages:
    #for p in pwidths:
        test_action_val_new_valv=v
        test_action_val_new_valp=0.5    
        test_actions.append([test_action_val_new_valv, test_action_val_new_valp])

        test_images.append(test_image)

test_input = [tf.stack(test_images), tf.stack(test_actions)] 

output = ynet(test_input)

df = pd.DataFrame(output.numpy())
df = df*7.8125
#maxValues = df.abs().max(axis = 1)
#maxValues=maxValues*7.8125

new_actions = []
for i in range(len(test_actions)):
    row = test_actions[i]
    row = np.array(row)
    row[0]=row[0]*10
    temp=row[1]*500
    row[1]=round(temp)
    new_actions.append(row)

actionsdf = pd.DataFrame (new_actions, columns = ['Voltage (V) ','Pulse Width (ms)'])
actionsdf=actionsdf.merge(df, left_index=True, right_index=True)

labels = [-10.0,-9.0,-8.0,-7.0,-6.0,-5.0,-4.0,-3.0,-2.0,2.0,3.0,4.0,5.0,6.0,7.0,8.0,9.0,10.0]
for i in range(18):
    plt.plot(df.T[i], label=labels[i])
    plt.legend(loc=(1.01,-0.08), title='Voltage V')
    plt.ylabel("Displacement (nm)")
    plt.xticks([0,2,4,6,8,10],[0,2*7.8125,4*7.8125,6*7.8125,8*7.8125,10*7.8125])
    plt.xlabel("X-Position (nm)")
In [39]:
outputs = []
test_images, test_actions = [], []

#test_image = mxvalid[0][1]
#test_image = np.full(14,(-0.3))
test_image = out
print(test_image)
add = np.array(test_image[1:-1])


#voltages = [0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]
voltages = [0.65,1.30,1.95,2.61,3.26]
#voltages = [-0.65,-1.30,-1.95,-2.61,-3.26]
#voltages = [-0.1,-0.2,-0.3,-0.4,-0.5,-0.6,-0.7,-0.8,-0.9,-1.0]
pwidths = [0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]


for v in voltages:
    #for p in pwidths:
        test_action_val_new_valv=v
        test_action_val_new_valp=0.5    
        test_actions.append([test_action_val_new_valv, test_action_val_new_valp])

        test_images.append(test_image)

test_input = [tf.stack(test_images), tf.stack(test_actions)] 

output = ynet(test_input)

df = pd.DataFrame(output.numpy())
df = df*7.8125
#maxValues = df.abs().max(axis = 1)
#maxValues=maxValues*7.8125

new_actions = []
for i in range(len(test_actions)):
    row = test_actions[i]
    row = np.array(row)
    row[0]=row[0]*10
    temp=row[1]*500
    row[1]=round(temp)
    new_actions.append(row)

actionsdf = pd.DataFrame (new_actions, columns = ['Voltage (V) ','Pulse Width (ms)'])
actionsdf=actionsdf.merge(df, left_index=True, right_index=True)

#labels = [-1.0,-2.0,-3.0,-4.0,-5.0,-6.0,-7.0,-8.0,-9.0,-10.0]
#labels = [1.0,2.0,3.0,4.0,5.0,6.0,7.0,8.0,9.0,10.0]
#labels = [-0.65,-1.30,-1.95,-2.61,-3.26]
labels = [0.65,1.30,1.95,2.61,3.26]
for i in range(5):
    plotstuff = df.T[i]+(add[:]*7.8125)
    plt.plot(plotstuff, label=labels[i])
        #print(add)
    #plt.plot(df.T[i], label=labels[i])
    #plt.plot(df.T[i], label=labels[i])
    plt.legend( title='Voltage V')
    plt.ylabel("Displacement (nm)",fontweight='bold')
    plt.plot(add*7.1825, 'b--')
    plt.xticks([0,2,4,6,8,10],[0,2*7.8125,4*7.8125,6*7.8125,8*7.8125,10*7.8125])
    plt.xlabel("X-Position (nm)",fontweight='bold')
plt.show()
[ 0.         -0.01609449 -0.02431608  0.00636413  0.02215511  0.04412076
  0.05582548  0.07156721  0.04594366  0.01798202  0.00879482  0.02019078
  0.04002864  0.        ]
In [40]:
outputs = []
test_images, test_actions = [], []

#test_image = mxvalid[0][1]
#test_image = np.full(14,(-0.3))
test_image = out
print(test_image)
add = np.array(test_image[1:-1])


#voltages = [0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]
#voltages = [0.65,1.30,1.95,2.61,3.26]
voltages = [-0.65,-1.30,-1.95,-2.61,-3.26]
#voltages = [-0.1,-0.2,-0.3,-0.4,-0.5,-0.6,-0.7,-0.8,-0.9,-1.0]
pwidths = [0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0]


for v in voltages:
    #for p in pwidths:
        test_action_val_new_valv=v
        test_action_val_new_valp=0.5    
        test_actions.append([test_action_val_new_valv, test_action_val_new_valp])

        test_images.append(test_image)

test_input = [tf.stack(test_images), tf.stack(test_actions)] 

output = ynet(test_input)

df = pd.DataFrame(output.numpy())
df = df*7.8125
#maxValues = df.abs().max(axis = 1)
#maxValues=maxValues*7.8125

new_actions = []
for i in range(len(test_actions)):
    row = test_actions[i]
    row = np.array(row)
    row[0]=row[0]*10
    temp=row[1]*500
    row[1]=round(temp)
    new_actions.append(row)

actionsdf = pd.DataFrame (new_actions, columns = ['Voltage (V) ','Pulse Width (ms)'])
actionsdf=actionsdf.merge(df, left_index=True, right_index=True)

#labels = [-1.0,-2.0,-3.0,-4.0,-5.0,-6.0,-7.0,-8.0,-9.0,-10.0]
#labels = [1.0,2.0,3.0,4.0,5.0,6.0,7.0,8.0,9.0,10.0]
labels = [-0.65,-1.30,-1.95,-2.61,-3.26]
#labels = [0.65,1.30,1.95,2.61,3.26]
for i in range(5):
    plotstuff = df.T[i]+(add[:]*7.8125)
    plt.plot(plotstuff, label=labels[i])
        #print(add)
    #plt.plot(df.T[i], label=labels[i])
    #plt.plot(df.T[i], label=labels[i])
    plt.legend( title='Voltage V')
    plt.ylabel("Displacement (nm)",fontweight='bold')
    plt.plot(add*7.1825, 'b--')
    plt.xticks([0,2,4,6,8,10],[0,2*7.8125,4*7.8125,6*7.8125,8*7.8125,10*7.8125])
    plt.xlabel("X-Position (nm)",fontweight='bold')
plt.show()
[ 0.         -0.01609449 -0.02431608  0.00636413  0.02215511  0.04412076
  0.05582548  0.07156721  0.04594366  0.01798202  0.00879482  0.02019078
  0.04002864  0.        ]
In [ ]: